code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name {
    /**
     * Writes one logging event to the SQLite database inside a transaction.
     * The secondary insert only happens when the primary insert produced an
     * event id; any failure is reported via addError rather than propagated.
     */
    @Override
    public void append(ILoggingEvent eventObject) {
        // Do nothing until the appender has been started.
        if (!isStarted()) {
            return;
        }
        try {
            clearExpiredLogs(db);
            SQLiteStatement stmt = db.compileStatement(insertSQL);
            try {
                db.beginTransaction();
                long eventId = subAppend(eventObject, stmt);
                if (eventId != -1) {
                    secondarySubAppend(eventObject, eventId);
                    // Only mark success when both inserts completed.
                    db.setTransactionSuccessful();
                }
            } finally {
                // Roll back automatically unless setTransactionSuccessful ran.
                if (db.inTransaction()) {
                    db.endTransaction();
                }
                stmt.close();
            }
        } catch (Throwable e) {
            addError("Cannot append event", e);
        }
    }
}
public class class_name { @Override public void append(ILoggingEvent eventObject) { if (isStarted()) { try { clearExpiredLogs(db); // depends on control dependency: [try], data = [none] SQLiteStatement stmt = db.compileStatement(insertSQL); try { db.beginTransaction(); // depends on control dependency: [try], data = [none] long eventId = subAppend(eventObject, stmt); if (eventId != -1) { secondarySubAppend(eventObject, eventId); // depends on control dependency: [if], data = [none] db.setTransactionSuccessful(); // depends on control dependency: [if], data = [none] } } finally { if (db.inTransaction()) { db.endTransaction(); // depends on control dependency: [if], data = [none] } stmt.close(); } } catch (Throwable e) { addError("Cannot append event", e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name {
    /**
     * Fetches a thumbnail for the file at the given path.
     * Size and format are only sent as query parameters when they differ
     * from the defaults (THUMBNAIL_SIZE / THUMBNAIL_FORMAT).
     */
    public ThumbnailDownload getThumbnail(String path, ThumbnailSize size, ThumbnailFormat format) {
        final OAuthRequest thumbnailRequest = new OAuthRequest(Verb.GET, THUMBNAILS_URL + encode(path));
        // Omit parameters matching the server-side defaults.
        if (size != THUMBNAIL_SIZE) {
            thumbnailRequest.addQuerystringParameter("size", size.toString());
        }
        if (format != THUMBNAIL_FORMAT) {
            thumbnailRequest.addQuerystringParameter("format", format.toString());
        }
        service.signRequest(accessToken, thumbnailRequest);
        final Response checked = checkThumbnails(thumbnailRequest.send());
        return new ThumbnailDownload(checked, path, size, format);
    }
}
public class class_name { public ThumbnailDownload getThumbnail(String path, ThumbnailSize size, ThumbnailFormat format) { OAuthRequest request = new OAuthRequest(Verb.GET, THUMBNAILS_URL + encode(path)); if (size != THUMBNAIL_SIZE) { request.addQuerystringParameter("size", size.toString()); // depends on control dependency: [if], data = [none] } if (format != THUMBNAIL_FORMAT) { request.addQuerystringParameter("format", format.toString()); // depends on control dependency: [if], data = [none] } service.signRequest(accessToken, request); Response response = checkThumbnails(request.send()); return new ThumbnailDownload(response, path, size, format); } }
public class class_name {
    /**
     * Reads a Schematron {@code span} element from XML into a {@link PSSpan} model object.
     * The "class" attribute is mapped explicitly; every other attribute is preserved as a
     * foreign attribute. Text children are accumulated, non-Schematron child elements are
     * kept as foreign elements, comments are dropped, and anything else produces a warning.
     */
    @Nonnull
    public PSSpan readSpanFromXML (@Nonnull final IMicroElement eSpan) {
        final PSSpan ret = new PSSpan ();
        eSpan.forAllAttributes ( (sNS, sAttrName, sVal) -> {
            final String sAttrValue = _getAttributeValue (sVal);
            if (sAttrName.equals (CSchematronXML.ATTR_CLASS))
                ret.setClazz (sAttrValue);
            else
                ret.addForeignAttribute (sAttrName, sAttrValue);
        });
        eSpan.forAllChildren (aSpanChild -> {
            switch (aSpanChild.getType ()) {
                case TEXT:
                    ret.addText (((IMicroText) aSpanChild).getNodeValue ());
                    break;
                case ELEMENT:
                    final IMicroElement eElement = (IMicroElement) aSpanChild;
                    if (CSchematron.NAMESPACE_SCHEMATRON.equals (eElement.getNamespaceURI ()))
                    {
                        // span has no Schematron-namespace children; warn and drop.
                        _warn (ret, "Unsupported Schematron element '" + eElement.getLocalName () + "'");
                    }
                    else
                        ret.addForeignElement (eElement.getClone ());
                    break;
                case COMMENT:
                    // Ignore comments
                    break;
                default:
                    _warn (ret, "Unsupported child node: " + aSpanChild);
            }
        });
        return ret;
    }
}
public class class_name { @Nonnull public PSSpan readSpanFromXML (@Nonnull final IMicroElement eSpan) { final PSSpan ret = new PSSpan (); eSpan.forAllAttributes ( (sNS, sAttrName, sVal) -> { final String sAttrValue = _getAttributeValue (sVal); if (sAttrName.equals (CSchematronXML.ATTR_CLASS)) ret.setClazz (sAttrValue); else ret.addForeignAttribute (sAttrName, sAttrValue); }); eSpan.forAllChildren (aSpanChild -> { switch (aSpanChild.getType ()) { case TEXT: ret.addText (((IMicroText) aSpanChild).getNodeValue ()); break; case ELEMENT: final IMicroElement eElement = (IMicroElement) aSpanChild; if (CSchematron.NAMESPACE_SCHEMATRON.equals (eElement.getNamespaceURI ())) { _warn (ret, "Unsupported Schematron element '" + eElement.getLocalName () + "'"); // depends on control dependency: [if], data = [none] } else ret.addForeignElement (eElement.getClone ()); break; case COMMENT: // Ignore comments break; default: _warn (ret, "Unsupported child node: " + aSpanChild); } }); return ret; } }
public class class_name {
    /**
     * Tells whether the given area intersects any of the collision bounds
     * of the collidable. Returns true on the first intersection found.
     */
    private static boolean checkCollide(Area area, Collidable other) {
        for (final Rectangle bounds : other.getCollisionBounds()) {
            if (area.intersects(bounds)) {
                return true;
            }
        }
        return false;
    }
}
public class class_name { private static boolean checkCollide(Area area, Collidable other) { final List<Rectangle> others = other.getCollisionBounds(); final int size = others.size(); for (int i = 0; i < size; i++) { final Area current = others.get(i); if (area.intersects(current)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name {
    /**
     * Returns the configured indent amount, or null when indentation is not
     * enabled ("yes") or no amount has been set.
     */
    public Integer getIndent() {
        if ("yes".equals(get(OutputKeys.INDENT))) {
            String amount = get(INDENT_AMT);
            if (amount != null) {
                return Integer.valueOf(amount);
            }
        }
        return null;
    }
}
public class class_name { public Integer getIndent() { if (!"yes".equals(get(OutputKeys.INDENT))) { return null; // depends on control dependency: [if], data = [none] } String value = get(INDENT_AMT); return (value == null) ? null : Integer.valueOf(value); } }
public class class_name {
    /**
     * Converts a row-major matrix into block format in place, using {@code tmp}
     * as scratch space for one block-row at a time.
     *
     * @param numRows number of rows in the matrix
     * @param numCols number of columns in the matrix
     * @param blockLength side length of the square blocks
     * @param data matrix data, rewritten in place into block layout
     * @param tmp scratch buffer; must hold at least one full block-row
     * @throws IllegalArgumentException if {@code tmp} is too small
     */
    public static void convertRowToBlock( int numRows , int numCols , int blockLength ,
                                          double[] data, double[] tmp )
    {
        // Scratch must hold min(blockLength, numRows) complete rows.
        int minLength = Math.min( blockLength , numRows ) * numCols;
        if( tmp.length < minLength ) {
            throw new IllegalArgumentException("tmp must be at least "+minLength+" long ");
        }
        for( int i = 0; i < numRows; i += blockLength ) {
            // Last block-row may be shorter than blockLength.
            int blockHeight = Math.min( blockLength , numRows - i);
            // Copy the whole block-row aside so data can be overwritten safely.
            System.arraycopy(data,i*numCols,tmp,0,blockHeight*numCols);
            for( int j = 0; j < numCols; j += blockLength ) {
                int blockWidth = Math.min( blockLength , numCols - j);
                // Destination of this block within the block-layout output.
                int indexDst = i*numCols + blockHeight*j;
                int indexSrcRow = j;
                // Copy the block one row at a time from the scratch buffer.
                for( int k = 0; k < blockHeight; k++ ) {
                    System.arraycopy(tmp,indexSrcRow,data,indexDst,blockWidth);
                    indexDst += blockWidth;
                    indexSrcRow += numCols;
                }
            }
        }
    }
}
public class class_name { public static void convertRowToBlock( int numRows , int numCols , int blockLength , double[] data, double[] tmp ) { int minLength = Math.min( blockLength , numRows ) * numCols; if( tmp.length < minLength ) { throw new IllegalArgumentException("tmp must be at least "+minLength+" long "); } for( int i = 0; i < numRows; i += blockLength ) { int blockHeight = Math.min( blockLength , numRows - i); System.arraycopy(data,i*numCols,tmp,0,blockHeight*numCols); // depends on control dependency: [for], data = [i] for( int j = 0; j < numCols; j += blockLength ) { int blockWidth = Math.min( blockLength , numCols - j); int indexDst = i*numCols + blockHeight*j; int indexSrcRow = j; for( int k = 0; k < blockHeight; k++ ) { System.arraycopy(tmp,indexSrcRow,data,indexDst,blockWidth); // depends on control dependency: [for], data = [none] indexDst += blockWidth; // depends on control dependency: [for], data = [none] indexSrcRow += numCols; // depends on control dependency: [for], data = [none] } } } } }
public class class_name {
    /**
     * Marshalls the given request to JSON via the protocol marshaller.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeDatasetRequest describeDatasetRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeDatasetRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            final String datasetName = describeDatasetRequest.getDatasetName();
            protocolMarshaller.marshall(datasetName, DATASETNAME_BINDING);
        } catch (Exception cause) {
            // Wrap so callers see a single SDK exception type, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + cause.getMessage(), cause);
        }
    }
}
public class class_name { public void marshall(DescribeDatasetRequest describeDatasetRequest, ProtocolMarshaller protocolMarshaller) { if (describeDatasetRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeDatasetRequest.getDatasetName(), DATASETNAME_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Imports a collision category from its XML node.
     * Resolves each referenced tile group against the map, parses the axis,
     * then reads the offset, glue flag and name attributes.
     *
     * @param root the category XML node (must not be null)
     * @param map the map tile collision used to resolve groups (must not be null)
     * @return the imported collision category
     * @throws LionEngineException if the axis attribute is not a valid Axis name
     */
    public static CollisionCategory imports(Xml root, MapTileCollision map) {
        Check.notNull(root);
        Check.notNull(map);
        final Collection<Xml> children = root.getChildren(TileGroupsConfig.NODE_GROUP);
        final Collection<CollisionGroup> groups = new ArrayList<>(children.size());
        for (final Xml groupNode : children) {
            final String groupName = groupNode.getText();
            final CollisionGroup group = map.getCollisionGroup(groupName);
            groups.add(group);
        }
        final String axisName = root.readString(ATT_AXIS);
        final Axis axis;
        try {
            axis = Axis.valueOf(axisName);
        } catch (final IllegalArgumentException exception) {
            // Unknown axis name: surface a descriptive engine error instead of the raw enum failure.
            throw new LionEngineException(exception, ERROR_AXIS + axisName);
        }
        final int x = root.readInteger(ATT_X);
        final int y = root.readInteger(ATT_Y);
        final boolean glue = root.readBoolean(true, ATT_GLUE);
        final String name = root.readString(ATT_NAME);
        return new CollisionCategory(name, axis, x, y, glue, groups);
    }
}
public class class_name { public static CollisionCategory imports(Xml root, MapTileCollision map) { Check.notNull(root); Check.notNull(map); final Collection<Xml> children = root.getChildren(TileGroupsConfig.NODE_GROUP); final Collection<CollisionGroup> groups = new ArrayList<>(children.size()); for (final Xml groupNode : children) { final String groupName = groupNode.getText(); final CollisionGroup group = map.getCollisionGroup(groupName); groups.add(group); // depends on control dependency: [for], data = [none] } final String axisName = root.readString(ATT_AXIS); final Axis axis; try { axis = Axis.valueOf(axisName); // depends on control dependency: [try], data = [none] } catch (final IllegalArgumentException exception) { throw new LionEngineException(exception, ERROR_AXIS + axisName); } // depends on control dependency: [catch], data = [none] final int x = root.readInteger(ATT_X); final int y = root.readInteger(ATT_Y); final boolean glue = root.readBoolean(true, ATT_GLUE); final String name = root.readString(ATT_NAME); return new CollisionCategory(name, axis, x, y, glue, groups); } }
public class class_name {
    /**
     * Updates the gateway of a virtual network connection, returning the raw
     * service response asynchronously.
     *
     * @throws IllegalArgumentException if any required parameter (or the
     *         client's subscription id / api version) is null
     */
    public Observable<ServiceResponse<VnetGatewayInner>> updateVnetGatewayWithServiceResponseAsync(String resourceGroupName, String name, String vnetName, String gatewayName, VnetGatewayInner connectionEnvelope) {
        // Fail fast on every required argument before issuing the network call.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (name == null) {
            throw new IllegalArgumentException("Parameter name is required and cannot be null.");
        }
        if (vnetName == null) {
            throw new IllegalArgumentException("Parameter vnetName is required and cannot be null.");
        }
        if (gatewayName == null) {
            throw new IllegalArgumentException("Parameter gatewayName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (connectionEnvelope == null) {
            throw new IllegalArgumentException("Parameter connectionEnvelope is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(connectionEnvelope);
        return service.updateVnetGateway(resourceGroupName, name, vnetName, gatewayName, this.client.subscriptionId(), connectionEnvelope, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<VnetGatewayInner>>>() {
                @Override
                public Observable<ServiceResponse<VnetGatewayInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<VnetGatewayInner> clientResponse = updateVnetGatewayDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Convert any delegate failure into an error emission rather than throwing.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class class_name { public Observable<ServiceResponse<VnetGatewayInner>> updateVnetGatewayWithServiceResponseAsync(String resourceGroupName, String name, String vnetName, String gatewayName, VnetGatewayInner connectionEnvelope) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (name == null) { throw new IllegalArgumentException("Parameter name is required and cannot be null."); } if (vnetName == null) { throw new IllegalArgumentException("Parameter vnetName is required and cannot be null."); } if (gatewayName == null) { throw new IllegalArgumentException("Parameter gatewayName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (connectionEnvelope == null) { throw new IllegalArgumentException("Parameter connectionEnvelope is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } Validator.validate(connectionEnvelope); return service.updateVnetGateway(resourceGroupName, name, vnetName, gatewayName, this.client.subscriptionId(), connectionEnvelope, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<VnetGatewayInner>>>() { @Override public Observable<ServiceResponse<VnetGatewayInner>> call(Response<ResponseBody> response) { try { ServiceResponse<VnetGatewayInner> clientResponse = updateVnetGatewayDelegate(response); return Observable.just(clientResponse); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { public double[][] inverse() { // Build permuted identity matrix efficiently: double[][] b = new double[piv.length][m]; for(int i = 0; i < piv.length; i++) { b[piv[i]][i] = 1.; } return solveInplace(b); } }
public class class_name { public double[][] inverse() { // Build permuted identity matrix efficiently: double[][] b = new double[piv.length][m]; for(int i = 0; i < piv.length; i++) { b[piv[i]][i] = 1.; // depends on control dependency: [for], data = [i] } return solveInplace(b); } }
public class class_name {
    /**
     * Registers a FacesListener, lazily creating the backing holder on first use.
     *
     * @throws NullPointerException if {@code listener} is null
     */
    protected void addFacesListener(FacesListener listener) {
        if (null == listener) {
            throw new NullPointerException();
        }
        if (null == listeners) {
            // Allocate the holder only once a listener is actually attached.
            listeners = new AttachedObjectListHolder<FacesListener>();
        }
        listeners.add(listener);
    }
}
public class class_name { protected void addFacesListener(FacesListener listener) { if (listener == null) { throw new NullPointerException(); } if (listeners == null) { listeners = new AttachedObjectListHolder<FacesListener>(); // depends on control dependency: [if], data = [none] } listeners.add(listener); } }
public class class_name {
    /**
     * Adds a seed URL to the crawl, optionally with a caller-supplied document id.
     * Invalid URLs are logged and dropped. A negative docId means "assign one":
     * already-seen URLs are skipped silently. The seed is only scheduled when
     * robots.txt allows it.
     *
     * @param pageUrl the seed URL
     * @param docId caller-assigned document id, or negative to auto-assign
     */
    public void addSeed(String pageUrl, int docId) throws IOException, InterruptedException {
        String canonicalUrl = URLCanonicalizer.getCanonicalURL(pageUrl);
        if (canonicalUrl == null) {
            logger.error("Invalid seed URL: {}", pageUrl);
        } else {
            if (docId < 0) {
                // No id supplied: reuse the existing one if known, else allocate.
                docId = docIdServer.getDocId(canonicalUrl);
                if (docId > 0) {
                    logger.trace("This URL is already seen.");
                    return;
                }
                docId = docIdServer.getNewDocID(canonicalUrl);
            } else {
                try {
                    docIdServer.addUrlAndDocId(canonicalUrl, docId);
                } catch (RuntimeException e) {
                    // Best-effort unless the configuration demands hard failure.
                    if (config.isHaltOnError()) {
                        throw e;
                    } else {
                        logger.error("Could not add seed: {}", e.getMessage());
                    }
                }
            }
            WebURL webUrl = new WebURL();
            webUrl.setTldList(tldList);
            webUrl.setURL(canonicalUrl);
            webUrl.setDocid(docId);
            webUrl.setDepth((short) 0);
            if (robotstxtServer.allows(webUrl)) {
                frontier.schedule(webUrl);
            } else {
                // using the WARN level here, as the user specifically asked to add this seed
                logger.warn("Robots.txt does not allow this seed: {}", pageUrl);
            }
        }
    }
}
public class class_name { public void addSeed(String pageUrl, int docId) throws IOException, InterruptedException { String canonicalUrl = URLCanonicalizer.getCanonicalURL(pageUrl); if (canonicalUrl == null) { logger.error("Invalid seed URL: {}", pageUrl); } else { if (docId < 0) { docId = docIdServer.getDocId(canonicalUrl); if (docId > 0) { logger.trace("This URL is already seen."); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } docId = docIdServer.getNewDocID(canonicalUrl); } else { try { docIdServer.addUrlAndDocId(canonicalUrl, docId); // depends on control dependency: [try], data = [none] } catch (RuntimeException e) { if (config.isHaltOnError()) { throw e; } else { logger.error("Could not add seed: {}", e.getMessage()); // depends on control dependency: [if], data = [none] } } // depends on control dependency: [catch], data = [none] } WebURL webUrl = new WebURL(); webUrl.setTldList(tldList); webUrl.setURL(canonicalUrl); webUrl.setDocid(docId); webUrl.setDepth((short) 0); if (robotstxtServer.allows(webUrl)) { frontier.schedule(webUrl); } else { // using the WARN level here, as the user specifically asked to add this seed logger.warn("Robots.txt does not allow this seed: {}", pageUrl); } } } }
public class class_name {
    /**
     * Extracts the file name component after the last platform separator.
     * Returns the input unchanged when it is null or contains no separator.
     */
    public static String getFileName(String filePath) {
        if (filePath != null) {
            final int separatorAt = filePath.lastIndexOf(File.separator);
            if (separatorAt >= 0) {
                return filePath.substring(separatorAt + 1);
            }
        }
        return filePath;
    }
}
public class class_name { public static String getFileName(String filePath) { if (filePath == null) { return filePath; // depends on control dependency: [if], data = [none] } int index = filePath.lastIndexOf(File.separator); if (index == -1) { return filePath; // depends on control dependency: [if], data = [none] } return filePath.substring(index + 1); } }
public class class_name {
    /**
     * Tries to auto-attach a metadata cache file to the given slot by comparing
     * cached track metadata against tracks sampled from the player over the
     * supplied connection. Candidate caches are grouped by source playlist;
     * the first cache whose sampled tracks all match is attached. Every
     * candidate that is not attached is closed before returning.
     *
     * NOTE(review): assumes gatherCandidateAttachmentGroups() returns open
     * MetadataCache files owned by this method — TODO confirm against caller.
     */
    private static void tryAutoAttachingWithConnection(SlotReference slot, Client client)
            throws IOException, InterruptedException, TimeoutException {
        // Keeps track of the files we might be able to auto-attach, grouped and sorted by the playlist they
        // were created from, where playlist 0 means all tracks.
        final Map<Integer, LinkedList<MetadataCache>> candidateGroups = gatherCandidateAttachmentGroups();
        MetadataCache match = null;
        // We will close any non-matched files from the candidateGroups in our finally clause,
        // but we will leave this one open because we are returning it.
        try {
            // Set up a menu request to process each group.
            for (Map.Entry<Integer,LinkedList<MetadataCache>> entry : candidateGroups.entrySet()) {
                final LinkedList<MetadataCache> candidates;
                ArrayList<Integer> tracksToSample;
                if (client.tryLockingForMenuOperations(MetadataFinder.MENU_TIMEOUT, TimeUnit.SECONDS)) {
                    try {
                        final int playlistId = entry.getKey();
                        candidates = entry.getValue();
                        final long count = getTrackCount(slot.slot, client, playlistId);
                        if (count == Message.NO_MENU_RESULTS_AVAILABLE || count == 0) {
                            // No tracks available to match this set of candidates.
                            for (final MetadataCache candidate : candidates) {
                                candidate.close();
                            }
                            candidates.clear();
                        }
                        // Filter out any candidates with the wrong number of tracks.
                        final Iterator<MetadataCache> candidateIterator = candidates.iterator();
                        while (candidateIterator.hasNext()) {
                            final MetadataCache candidate = candidateIterator.next();
                            if (candidate.trackCount != count) {
                                candidate.close();
                                candidateIterator.remove();
                            }
                        }
                        // Bail before querying any metadata if we can already rule out all the candidates.
                        if (candidates.isEmpty()) {
                            continue;
                        }
                        // Gather as many track IDs as we are configured to sample, up to the number available
                        tracksToSample = chooseTrackSample(slot, client, (int) count);
                    } finally {
                        // Always release the menu lock, even on the continue path above.
                        client.unlockForMenuOperations();
                    }
                } else {
                    throw new TimeoutException("Unable to lock player for menu operations.");
                }
                // Winnow out any auto-attachment candidates that don't match any sampled track
                for (final int trackId : tracksToSample) {
                    logger.info("Comparing track " + trackId + " with " + candidates.size() + " metadata cache file(s).");
                    final DataReference reference = new DataReference(slot, trackId);
                    final TrackMetadata track = MetadataFinder.getInstance().queryMetadata(reference, CdjStatus.TrackType.REKORDBOX, client);
                    if (track == null) {
                        logger.warn("Unable to retrieve metadata when attempting cache auto-attach for slot {}, giving up", slot);
                        return;
                    }
                    // Iterate backwards so removal by index is safe.
                    for (int i = candidates.size() - 1; i >= 0; --i) {
                        final MetadataCache candidate = candidates.get(i);
                        if (!track.equals(candidate.getTrackMetadata(null, reference))) {
                            candidate.close();
                            candidates.remove(i);
                        }
                    }
                    if (candidates.isEmpty()) {
                        break;  // No point sampling more tracks, we have ruled out all candidates in this group.
                    }
                }
                if (candidates.isEmpty()) {
                    continue;  // This group has failed; move on to the next candidate group, if any.
                }
                match = candidates.get(0);  // We have found at least one matching cache, use the first.
                logger.info("Auto-attaching metadata cache " + match.getName() + " to slot " + slot);
                MetadataFinder.getInstance().attachMetadataCacheInternal(slot, match);
                return;
            }
        } finally {
            // No matter how we leave this function, close any of the remaining zip files we are not attaching.
            for (Map.Entry<Integer, LinkedList<MetadataCache>> entry : candidateGroups.entrySet()) {
                for (MetadataCache candidate : entry.getValue()) {
                    if (candidate != match) {
                        candidate.close();
                    }
                }
            }
        }
    }
}
public class class_name { private static void tryAutoAttachingWithConnection(SlotReference slot, Client client) throws IOException, InterruptedException, TimeoutException { // Keeps track of the files we might be able to auto-attach, grouped and sorted by the playlist they // were created from, where playlist 0 means all tracks. final Map<Integer, LinkedList<MetadataCache>> candidateGroups = gatherCandidateAttachmentGroups(); MetadataCache match = null; // We will close any non-matched files from the candidateGroups in our finally clause, // but we will leave this one open because we are returning it. try { // Set up a menu request to process each group. for (Map.Entry<Integer,LinkedList<MetadataCache>> entry : candidateGroups.entrySet()) { final LinkedList<MetadataCache> candidates; ArrayList<Integer> tracksToSample; if (client.tryLockingForMenuOperations(MetadataFinder.MENU_TIMEOUT, TimeUnit.SECONDS)) { try { final int playlistId = entry.getKey(); candidates = entry.getValue(); // depends on control dependency: [try], data = [none] final long count = getTrackCount(slot.slot, client, playlistId); if (count == Message.NO_MENU_RESULTS_AVAILABLE || count == 0) { // No tracks available to match this set of candidates. for (final MetadataCache candidate : candidates) { candidate.close(); // depends on control dependency: [for], data = [candidate] } candidates.clear(); // depends on control dependency: [if], data = [none] } // Filter out any candidates with the wrong number of tracks. final Iterator<MetadataCache> candidateIterator = candidates.iterator(); while (candidateIterator.hasNext()) { final MetadataCache candidate = candidateIterator.next(); if (candidate.trackCount != count) { candidate.close(); // depends on control dependency: [if], data = [none] candidateIterator.remove(); // depends on control dependency: [if], data = [none] } } // Bail before querying any metadata if we can already rule out all the candidates. 
if (candidates.isEmpty()) { continue; } // Gather as many track IDs as we are configured to sample, up to the number available tracksToSample = chooseTrackSample(slot, client, (int) count); // depends on control dependency: [try], data = [none] } finally { client.unlockForMenuOperations(); } } else { throw new TimeoutException("Unable to lock player for menu operations."); } // Winnow out any auto-attachment candidates that don't match any sampled track for (final int trackId : tracksToSample) { logger.info("Comparing track " + trackId + " with " + candidates.size() + " metadata cache file(s)."); // depends on control dependency: [for], data = [trackId] final DataReference reference = new DataReference(slot, trackId); final TrackMetadata track = MetadataFinder.getInstance().queryMetadata(reference, CdjStatus.TrackType.REKORDBOX, client); if (track == null) { logger.warn("Unable to retrieve metadata when attempting cache auto-attach for slot {}, giving up", slot); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } for (int i = candidates.size() - 1; i >= 0; --i) { final MetadataCache candidate = candidates.get(i); if (!track.equals(candidate.getTrackMetadata(null, reference))) { candidate.close(); // depends on control dependency: [if], data = [none] candidates.remove(i); // depends on control dependency: [if], data = [none] } } if (candidates.isEmpty()) { break; // No point sampling more tracks, we have ruled out all candidates in this group. } } if (candidates.isEmpty()) { continue; // This group has failed; move on to the next candidate group, if any. } match = candidates.get(0); // We have found at least one matching cache, use the first. 
// depends on control dependency: [for], data = [none] logger.info("Auto-attaching metadata cache " + match.getName() + " to slot " + slot); // depends on control dependency: [for], data = [none] MetadataFinder.getInstance().attachMetadataCacheInternal(slot, match); // depends on control dependency: [for], data = [none] return; // depends on control dependency: [for], data = [none] } } finally { // No matter how we leave this function, close any of the remaining zip files we are not attaching. for (Map.Entry<Integer, LinkedList<MetadataCache>> entry : candidateGroups.entrySet()) { for (MetadataCache candidate : entry.getValue()) { if (candidate != match) { candidate.close(); // depends on control dependency: [if], data = [none] } } } } } }
public class class_name {
    /**
     * Deletes the oldest snapshot blobs, keeping only the configured number
     * of most recently modified ones. Failed deletions are logged, not fatal.
     */
    @Override
    public void cleanSnapshots() {
        final File[] snapshots = getFilesByType(HollowProducer.Blob.Type.SNAPSHOT.prefix);
        if (snapshots == null || snapshots.length <= numOfSnapshotsToKeep) {
            return;
        }
        sortByLastModified(snapshots);
        // Everything past the keep-count is stale and gets removed.
        for (int index = numOfSnapshotsToKeep; index < snapshots.length; index++) {
            final File stale = snapshots[index];
            if (!stale.delete()) {
                log.warning("Could not delete snapshot " + stale.getPath());
            }
        }
    }
}
public class class_name { @Override public void cleanSnapshots() { File[] files = getFilesByType(HollowProducer.Blob.Type.SNAPSHOT.prefix); if(files == null || files.length <= numOfSnapshotsToKeep) { return; // depends on control dependency: [if], data = [none] } sortByLastModified(files); for(int i= numOfSnapshotsToKeep; i < files.length; i++){ File file = files[i]; boolean deleted = file.delete(); if(!deleted) { log.warning("Could not delete snapshot " + file.getPath()); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public Tunnel getTunnel( String destinationHostname, int destinationPort ) { // might be better to cache, but dont anticipate massive numbers // of tunnel connections... for ( TunnelConnection tunnelConnection : tunnelConnections ) { Tunnel tunnel = tunnelConnection.getTunnel( destinationHostname, destinationPort ); if ( tunnel != null ) { return tunnel; } } return null; } }
public class class_name { public Tunnel getTunnel( String destinationHostname, int destinationPort ) { // might be better to cache, but dont anticipate massive numbers // of tunnel connections... for ( TunnelConnection tunnelConnection : tunnelConnections ) { Tunnel tunnel = tunnelConnection.getTunnel( destinationHostname, destinationPort ); if ( tunnel != null ) { return tunnel; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name {
    /**
     * Removes a node from the result along with every edge attached to it,
     * both incoming (upstream) and outgoing (downstream).
     */
    private void removeNode(Node node) {
        result.remove(node);
        for (final Edge incoming : node.getUpstream()) {
            result.remove(incoming);
        }
        for (final Edge outgoing : node.getDownstream()) {
            result.remove(outgoing);
        }
    }
}
public class class_name { private void removeNode(Node node) { result.remove(node); for (Edge edge : node.getUpstream()) { result.remove(edge); // depends on control dependency: [for], data = [edge] } for (Edge edge : node.getDownstream()) { result.remove(edge); // depends on control dependency: [for], data = [edge] } } }
public class class_name {
    /**
     * Creates a SerIterator view over an object array for serialization.
     *
     * @param array the array to iterate
     * @param declaredType the declared type of the field holding the array
     * @param valueType the component type to report
     */
    public static final SerIterator array(
            final Object[] array,
            final Class<?> declaredType,
            final Class<?> valueType) {
        return new SerIterator() {
            // Cursor into the array; starts before the first element, advanced by next().
            private int index = -1;
            @Override
            public String metaTypeName() {
                return metaTypeNameBase(valueType);
            }
            // Builds the meta-type name, appending "[]" per array dimension.
            private String metaTypeNameBase(Class<?> arrayType) {
                if (arrayType.isArray()) {
                    return metaTypeNameBase(arrayType.getComponentType()) + "[]";
                }
                if (arrayType == Object.class) {
                    return "Object[]";
                }
                if (arrayType == String.class) {
                    return "String[]";
                }
                return arrayType.getName() + "[]";
            }
            @Override
            public boolean metaTypeRequired() {
                // Object[]/String[] need no explicit meta type when the declared
                // type already conveys the array type.
                if (valueType == Object.class) {
                    return Object[].class.isAssignableFrom(declaredType) == false;
                }
                if (valueType == String.class) {
                    return String[].class.isAssignableFrom(declaredType) == false;
                }
                return true;
            }
            @Override
            public int size() {
                return array.length;
            }
            @Override
            public boolean hasNext() {
                return (index + 1) < array.length;
            }
            @Override
            public void next() {
                index++;
            }
            @Override
            public Class<?> valueType() {
                return valueType;
            }
            @Override
            public List<Class<?>> valueTypeTypes() {
                return Collections.emptyList();
            }
            @Override
            public Object value() {
                // NOTE(review): calling value() before next() indexes at -1 and
                // throws ArrayIndexOutOfBoundsException — presumed caller contract.
                return array[index];
            }
        };
    }
}
public class class_name { public static final SerIterator array( final Object[] array, final Class<?> declaredType, final Class<?> valueType) { return new SerIterator() { private int index = -1; @Override public String metaTypeName() { return metaTypeNameBase(valueType); } private String metaTypeNameBase(Class<?> arrayType) { if (arrayType.isArray()) { return metaTypeNameBase(arrayType.getComponentType()) + "[]"; // depends on control dependency: [if], data = [none] } if (arrayType == Object.class) { return "Object[]"; } if (arrayType == String.class) { return "String[]"; } return arrayType.getName() + "[]"; } @Override public boolean metaTypeRequired() { if (valueType == Object.class) { return Object[].class.isAssignableFrom(declaredType) == false; // depends on control dependency: [if], data = [none] } if (valueType == String.class) { return String[].class.isAssignableFrom(declaredType) == false; // depends on control dependency: [if], data = [none] } return true; } @Override public int size() { return array.length; } @Override public boolean hasNext() { return (index + 1) < array.length; } @Override public void next() { index++; } @Override public Class<?> valueType() { return valueType; } @Override public List<Class<?>> valueTypeTypes() { return Collections.emptyList(); } @Override public Object value() { return array[index]; } }; } }
public class class_name { public static double Inverse(double y0) { if (y0 <= 0.0) { if (y0 == 0) return Double.NEGATIVE_INFINITY; try { throw new IllegalArgumentException("y0"); } catch (Exception e) { e.printStackTrace(); } } if (y0 >= 1.0) { if (y0 == 1) return Double.POSITIVE_INFINITY; try { throw new IllegalArgumentException("y0"); } catch (Exception e) { e.printStackTrace(); } } double s2pi = Math.sqrt(2.0 * Math.PI); int code = 1; double y = y0; double x; double[] P0 = { -59.963350101410789, 98.001075418599967, -56.676285746907027, 13.931260938727968, -1.2391658386738125 }; double[] Q0 = { 1.9544885833814176, 4.6762791289888153, 86.360242139089053, -225.46268785411937, 200.26021238006066, -82.037225616833339, 15.90562251262117, -1.1833162112133 }; double[] P1 = { 4.0554489230596245, 31.525109459989388, 57.162819224642128, 44.080507389320083, 14.684956192885803, 2.1866330685079025, -0.14025607917135449, -0.035042462682784818, -0.00085745678515468545 }; double[] Q1 = { 15.779988325646675, 45.390763512887922, 41.317203825467203, 15.04253856929075, 2.5046494620830941, -0.14218292285478779, -0.038080640769157827, -0.00093325948089545744 }; double[] P2 = { 3.2377489177694603, 6.9152288906898418, 3.9388102529247444, 1.3330346081580755, 0.20148538954917908, 0.012371663481782003, 0.00030158155350823543, 2.6580697468673755E-06, 6.2397453918498331E-09 }; double[] Q2 = { 6.02427039364742, 3.6798356385616087, 1.3770209948908132, 0.21623699359449663, 0.013420400608854318, 0.00032801446468212774, 2.8924786474538068E-06, 6.7901940800998127E-09 }; if (y > 0.8646647167633873) { y = 1.0 - y; code = 0; } if (y > 0.1353352832366127) { y -= 0.5; double y2 = y * y; x = y + y * ((y2 * Special.Polevl(y2, P0, 4)) / Special.P1evl(y2, Q0, 8)); x *= s2pi; return x; } x = Math.sqrt(-2.0 * Math.log(y)); double x0 = x - Math.log(x) / x; double z = 1.0 / x; double x1; if (x < 8.0) { x1 = (z * Special.Polevl(z, P1, 8)) / Special.P1evl(z, Q1, 8); } else { x1 = (z * Special.Polevl(z, P2, 8)) / 
Special.P1evl(z, Q2, 8); } x = x0 - x1; if (code != 0) x = -x; return x; } }
public class class_name { public static double Inverse(double y0) { if (y0 <= 0.0) { if (y0 == 0) return Double.NEGATIVE_INFINITY; try { throw new IllegalArgumentException("y0"); } catch (Exception e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } if (y0 >= 1.0) { if (y0 == 1) return Double.POSITIVE_INFINITY; try { throw new IllegalArgumentException("y0"); } catch (Exception e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } double s2pi = Math.sqrt(2.0 * Math.PI); int code = 1; double y = y0; double x; double[] P0 = { -59.963350101410789, 98.001075418599967, -56.676285746907027, 13.931260938727968, -1.2391658386738125 }; double[] Q0 = { 1.9544885833814176, 4.6762791289888153, 86.360242139089053, -225.46268785411937, 200.26021238006066, -82.037225616833339, 15.90562251262117, -1.1833162112133 }; double[] P1 = { 4.0554489230596245, 31.525109459989388, 57.162819224642128, 44.080507389320083, 14.684956192885803, 2.1866330685079025, -0.14025607917135449, -0.035042462682784818, -0.00085745678515468545 }; double[] Q1 = { 15.779988325646675, 45.390763512887922, 41.317203825467203, 15.04253856929075, 2.5046494620830941, -0.14218292285478779, -0.038080640769157827, -0.00093325948089545744 }; double[] P2 = { 3.2377489177694603, 6.9152288906898418, 3.9388102529247444, 1.3330346081580755, 0.20148538954917908, 0.012371663481782003, 0.00030158155350823543, 2.6580697468673755E-06, 6.2397453918498331E-09 }; double[] Q2 = { 6.02427039364742, 3.6798356385616087, 1.3770209948908132, 0.21623699359449663, 0.013420400608854318, 0.00032801446468212774, 2.8924786474538068E-06, 6.7901940800998127E-09 }; if (y > 0.8646647167633873) { y = 1.0 - y; // depends on control dependency: [if], data = [none] code = 0; // depends on control dependency: [if], data = [none] } if (y > 0.1353352832366127) { y -= 0.5; // depends on control dependency: [if], data = [none] double y2 = y * y; x = y + y * ((y2 * Special.Polevl(y2, P0, 4)) 
/ Special.P1evl(y2, Q0, 8)); // depends on control dependency: [if], data = [(y] x *= s2pi; // depends on control dependency: [if], data = [none] return x; // depends on control dependency: [if], data = [none] } x = Math.sqrt(-2.0 * Math.log(y)); double x0 = x - Math.log(x) / x; double z = 1.0 / x; double x1; if (x < 8.0) { x1 = (z * Special.Polevl(z, P1, 8)) / Special.P1evl(z, Q1, 8); // depends on control dependency: [if], data = [none] } else { x1 = (z * Special.Polevl(z, P2, 8)) / Special.P1evl(z, Q2, 8); // depends on control dependency: [if], data = [none] } x = x0 - x1; if (code != 0) x = -x; return x; } }
public class class_name { public void destroy() { if (mContentView != null) { if (mGroupBasicAdapter != null) { mGroupBasicAdapter.destroy(); } mContentView.setAdapter(null); mContentView = null; } TimerSupport timerSupport = getService(TimerSupport.class); if (timerSupport != null) { timerSupport.clear(); } SimpleClickSupport simpleClickSupport = getService(SimpleClickSupport.class); if (simpleClickSupport != null) { simpleClickSupport.destroy(); } ExposureSupport exposureSupport = getService(ExposureSupport.class); if (exposureSupport != null) { exposureSupport.destroy(); } BusSupport busSupport = getService(BusSupport.class); if (busSupport != null) { busSupport.shutdown(); } BannerSupport bannerSupport = getService(BannerSupport.class); if (bannerSupport != null) { bannerSupport.destroy(); } VafContext vafContext = getService(VafContext.class); if (vafContext != null) { vafContext.onDestroy(); } } }
public class class_name { public void destroy() { if (mContentView != null) { if (mGroupBasicAdapter != null) { mGroupBasicAdapter.destroy(); // depends on control dependency: [if], data = [none] } mContentView.setAdapter(null); // depends on control dependency: [if], data = [null)] mContentView = null; // depends on control dependency: [if], data = [none] } TimerSupport timerSupport = getService(TimerSupport.class); if (timerSupport != null) { timerSupport.clear(); // depends on control dependency: [if], data = [none] } SimpleClickSupport simpleClickSupport = getService(SimpleClickSupport.class); if (simpleClickSupport != null) { simpleClickSupport.destroy(); // depends on control dependency: [if], data = [none] } ExposureSupport exposureSupport = getService(ExposureSupport.class); if (exposureSupport != null) { exposureSupport.destroy(); // depends on control dependency: [if], data = [none] } BusSupport busSupport = getService(BusSupport.class); if (busSupport != null) { busSupport.shutdown(); // depends on control dependency: [if], data = [none] } BannerSupport bannerSupport = getService(BannerSupport.class); if (bannerSupport != null) { bannerSupport.destroy(); // depends on control dependency: [if], data = [none] } VafContext vafContext = getService(VafContext.class); if (vafContext != null) { vafContext.onDestroy(); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void requestFinished(final String sessionId, final String requestId) { if(!_sticky) { final MemcachedBackupSession msmSession = _manager.getSessionInternal( sessionId ); if ( msmSession == null ) { if(_log.isDebugEnabled()) _log.debug( "No session found in session map for " + sessionId ); return; } if ( !msmSession.isValidInternal() ) { if(_log.isDebugEnabled()) _log.debug( "Non valid session found in session map for " + sessionId ); return; } synchronized (_manager.getSessionsInternal()) { // if another thread in the meantime retrieved the session // we must not remove it as this would case session data loss // for the other request if ( msmSession.releaseReference() > 0 ) { if(_log.isDebugEnabled()) _log.debug( "Session " + sessionId + " is still used by another request, skipping backup and (optional) lock handling/release." ); return; } msmSession.passivate(); _manager.removeInternal( msmSession, false ); } if(msmSession.isLocked()) { _lockingStrategy.releaseLock(sessionId); msmSession.releaseLock(); _lockingStrategy.registerReadonlyRequest(requestId); } } } }
public class class_name { public void requestFinished(final String sessionId, final String requestId) { if(!_sticky) { final MemcachedBackupSession msmSession = _manager.getSessionInternal( sessionId ); if ( msmSession == null ) { if(_log.isDebugEnabled()) _log.debug( "No session found in session map for " + sessionId ); return; // depends on control dependency: [if], data = [none] } if ( !msmSession.isValidInternal() ) { if(_log.isDebugEnabled()) _log.debug( "Non valid session found in session map for " + sessionId ); return; // depends on control dependency: [if], data = [none] } synchronized (_manager.getSessionsInternal()) { // depends on control dependency: [if], data = [none] // if another thread in the meantime retrieved the session // we must not remove it as this would case session data loss // for the other request if ( msmSession.releaseReference() > 0 ) { if(_log.isDebugEnabled()) _log.debug( "Session " + sessionId + " is still used by another request, skipping backup and (optional) lock handling/release." ); return; // depends on control dependency: [if], data = [none] } msmSession.passivate(); _manager.removeInternal( msmSession, false ); } if(msmSession.isLocked()) { _lockingStrategy.releaseLock(sessionId); // depends on control dependency: [if], data = [none] msmSession.releaseLock(); // depends on control dependency: [if], data = [none] _lockingStrategy.registerReadonlyRequest(requestId); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public int getFailOnCVSS() { if (line.hasOption(ARGUMENT.FAIL_ON_CVSS)) { final String value = line.getOptionValue(ARGUMENT.FAIL_ON_CVSS); try { return Integer.parseInt(value); } catch (NumberFormatException nfe) { return 11; } } else { return 11; } } }
public class class_name { public int getFailOnCVSS() { if (line.hasOption(ARGUMENT.FAIL_ON_CVSS)) { final String value = line.getOptionValue(ARGUMENT.FAIL_ON_CVSS); try { return Integer.parseInt(value); // depends on control dependency: [try], data = [none] } catch (NumberFormatException nfe) { return 11; } // depends on control dependency: [catch], data = [none] } else { return 11; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void onAuthenticationFailure(BoxAuthenticationInfo infoOriginal, Exception ex) { String msg = "failure:"; if (getAuthStorage() != null) { msg += "auth storage :" + getAuthStorage().toString(); } BoxAuthenticationInfo info = BoxAuthenticationInfo.unmodifiableObject(infoOriginal); if (info != null) { msg += info.getUser() == null ? "null user" : info.getUser().getId() == null ? "null user id" : info.getUser().getId().length(); } BoxLogUtils.nonFatalE("BoxAuthfail", msg , ex); Set<AuthListener> listeners = getListeners(); for (AuthListener listener : listeners) { listener.onAuthFailure(info, ex); } } }
public class class_name { public void onAuthenticationFailure(BoxAuthenticationInfo infoOriginal, Exception ex) { String msg = "failure:"; if (getAuthStorage() != null) { msg += "auth storage :" + getAuthStorage().toString(); // depends on control dependency: [if], data = [none] } BoxAuthenticationInfo info = BoxAuthenticationInfo.unmodifiableObject(infoOriginal); if (info != null) { msg += info.getUser() == null ? "null user" : info.getUser().getId() == null ? "null user id" : info.getUser().getId().length(); // depends on control dependency: [if], data = [none] } BoxLogUtils.nonFatalE("BoxAuthfail", msg , ex); Set<AuthListener> listeners = getListeners(); for (AuthListener listener : listeners) { listener.onAuthFailure(info, ex); // depends on control dependency: [for], data = [listener] } } }
public class class_name { protected void exportProject(Element parent, CmsProject project) throws CmsImportExportException, SAXException { I_CmsReport report = getReport(); CmsDefaultUsers defaultUsers = OpenCms.getDefaultUsers(); String users; try { users = getCms().readGroup(project.getGroupId()).getName(); } catch (CmsException e) { CmsMessageContainer message = org.opencms.db.Messages.get().container( org.opencms.db.Messages.ERR_READ_GROUP_FOR_ID_1, project.getGroupId()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), e); } users = defaultUsers.getGroupUsers(); report.println(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0)); report.print(message, I_CmsReport.FORMAT_ERROR); } String managers; try { managers = getCms().readGroup(project.getManagerGroupId()).getName(); } catch (CmsException e) { CmsMessageContainer message = org.opencms.db.Messages.get().container( org.opencms.db.Messages.ERR_READ_GROUP_FOR_ID_1, project.getManagerGroupId()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), e); } managers = defaultUsers.getGroupAdministrators(); report.println(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0)); report.print(message, I_CmsReport.FORMAT_ERROR); } Element e = parent.addElement(CmsImportVersion10.N_PROJECT); e.addElement(CmsImportVersion10.N_NAME).addText(project.getSimpleName()); e.addElement(CmsImportVersion10.N_DESCRIPTION).addCDATA(project.getDescription()); e.addElement(CmsImportVersion10.N_USERSGROUP).addText(users); e.addElement(CmsImportVersion10.N_MANAGERSGROUP).addText(managers); Element resources = e.addElement(CmsImportVersion10.N_RESOURCES); try { Iterator<String> it = getCms().readProjectResources(project).iterator(); while (it.hasNext()) { String resName = it.next(); resources.addElement(CmsImportVersion10.N_RESOURCE).addText(resName); } } catch (CmsException exc) { CmsMessageContainer message = org.opencms.db.Messages.get().container( 
org.opencms.db.Messages.ERR_READ_PROJECT_RESOURCES_2, project.getName(), project.getUuid()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), exc); } throw new CmsImportExportException(message, exc); } // write the XML digestElement(parent, e); } }
public class class_name { protected void exportProject(Element parent, CmsProject project) throws CmsImportExportException, SAXException { I_CmsReport report = getReport(); CmsDefaultUsers defaultUsers = OpenCms.getDefaultUsers(); String users; try { users = getCms().readGroup(project.getGroupId()).getName(); } catch (CmsException e) { CmsMessageContainer message = org.opencms.db.Messages.get().container( org.opencms.db.Messages.ERR_READ_GROUP_FOR_ID_1, project.getGroupId()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), e); // depends on control dependency: [if], data = [none] } users = defaultUsers.getGroupUsers(); report.println(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0)); report.print(message, I_CmsReport.FORMAT_ERROR); } String managers; try { managers = getCms().readGroup(project.getManagerGroupId()).getName(); } catch (CmsException e) { CmsMessageContainer message = org.opencms.db.Messages.get().container( org.opencms.db.Messages.ERR_READ_GROUP_FOR_ID_1, project.getManagerGroupId()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), e); // depends on control dependency: [if], data = [none] } managers = defaultUsers.getGroupAdministrators(); report.println(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0)); report.print(message, I_CmsReport.FORMAT_ERROR); } Element e = parent.addElement(CmsImportVersion10.N_PROJECT); e.addElement(CmsImportVersion10.N_NAME).addText(project.getSimpleName()); e.addElement(CmsImportVersion10.N_DESCRIPTION).addCDATA(project.getDescription()); e.addElement(CmsImportVersion10.N_USERSGROUP).addText(users); e.addElement(CmsImportVersion10.N_MANAGERSGROUP).addText(managers); Element resources = e.addElement(CmsImportVersion10.N_RESOURCES); try { Iterator<String> it = getCms().readProjectResources(project).iterator(); while (it.hasNext()) { String resName = it.next(); resources.addElement(CmsImportVersion10.N_RESOURCE).addText(resName); // depends on 
control dependency: [while], data = [none] } } catch (CmsException exc) { CmsMessageContainer message = org.opencms.db.Messages.get().container( org.opencms.db.Messages.ERR_READ_PROJECT_RESOURCES_2, project.getName(), project.getUuid()); if (LOG.isDebugEnabled()) { LOG.debug(message.key(), exc); // depends on control dependency: [if], data = [none] } throw new CmsImportExportException(message, exc); } // write the XML digestElement(parent, e); } }
public class class_name { public static double elementMin( DMatrixSparseCSC A ) { if( A.nz_length == 0) return 0; // if every element is assigned a value then the first element can be a minimum. // Otherwise zero needs to be considered double min = A.isFull() ? A.nz_values[0] : 0; for(int i = 0; i < A.nz_length; i++ ) { double val = A.nz_values[i]; if( val < min ) { min = val; } } return min; } }
public class class_name { public static double elementMin( DMatrixSparseCSC A ) { if( A.nz_length == 0) return 0; // if every element is assigned a value then the first element can be a minimum. // Otherwise zero needs to be considered double min = A.isFull() ? A.nz_values[0] : 0; for(int i = 0; i < A.nz_length; i++ ) { double val = A.nz_values[i]; if( val < min ) { min = val; // depends on control dependency: [if], data = [none] } } return min; } }
public class class_name { @CheckResult @Override public boolean shareTraces(String fullTraces) { try { shareTracesInternal(fullTraces); return true; } catch (RuntimeException exception1) { // Likely cause is a TransactionTooLargeException on API levels 15+. try { /* * Limit trace size to between 100kB and 400kB, since Unicode characters can be 1-4 bytes each. */ int fullTracesLength = fullTraces.length(); String truncatedTraces = fullTraces.substring(Math.max(0, fullTracesLength - 100000), fullTracesLength); shareTracesInternal(truncatedTraces); return true; } catch (RuntimeException exception2) { // Likely cause is a TransactionTooLargeException on API levels 15+. return false; } } } }
public class class_name { @CheckResult @Override public boolean shareTraces(String fullTraces) { try { shareTracesInternal(fullTraces); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } catch (RuntimeException exception1) { // Likely cause is a TransactionTooLargeException on API levels 15+. try { /* * Limit trace size to between 100kB and 400kB, since Unicode characters can be 1-4 bytes each. */ int fullTracesLength = fullTraces.length(); String truncatedTraces = fullTraces.substring(Math.max(0, fullTracesLength - 100000), fullTracesLength); shareTracesInternal(truncatedTraces); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } catch (RuntimeException exception2) { // Likely cause is a TransactionTooLargeException on API levels 15+. return false; } // depends on control dependency: [catch], data = [none] } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void real( TupleDesc_F64 input , TupleDesc_S8 output ) { double max = 0; for( int i = 0; i < input.size(); i++ ) { double v = Math.abs(input.value[i]); if( v > max ) max = v; } for( int i = 0; i < input.size(); i++ ) { output.value[i] = (byte)(127.0*input.value[i]/max); } } }
public class class_name { public static void real( TupleDesc_F64 input , TupleDesc_S8 output ) { double max = 0; for( int i = 0; i < input.size(); i++ ) { double v = Math.abs(input.value[i]); if( v > max ) max = v; } for( int i = 0; i < input.size(); i++ ) { output.value[i] = (byte)(127.0*input.value[i]/max); // depends on control dependency: [for], data = [i] } } }
public class class_name { public static base_responses delete(nitro_service client, String ipv6address[]) throws Exception { base_responses result = null; if (ipv6address != null && ipv6address.length > 0) { nsip6 deleteresources[] = new nsip6[ipv6address.length]; for (int i=0;i<ipv6address.length;i++){ deleteresources[i] = new nsip6(); deleteresources[i].ipv6address = ipv6address[i]; } result = delete_bulk_request(client, deleteresources); } return result; } }
public class class_name { public static base_responses delete(nitro_service client, String ipv6address[]) throws Exception { base_responses result = null; if (ipv6address != null && ipv6address.length > 0) { nsip6 deleteresources[] = new nsip6[ipv6address.length]; for (int i=0;i<ipv6address.length;i++){ deleteresources[i] = new nsip6(); // depends on control dependency: [for], data = [i] deleteresources[i].ipv6address = ipv6address[i]; // depends on control dependency: [for], data = [i] } result = delete_bulk_request(client, deleteresources); } return result; } }
public class class_name { protected void orderNode(SquareNode target, SquareNode node, boolean pointingX) { int index0 = findIntersection(target,node); int index1 = (index0+1)%4; int index2 = (index0+2)%4; int index3 = (index0+3)%4; if( index0 < 0 ) throw new RuntimeException("Couldn't find intersection. Probable bug"); lineCenters.a = target.center; lineCenters.b = node.center; UtilLine2D_F64.convert(lineCenters,general); Polygon2D_F64 poly = target.square; if( pointingX ) { if (sign(general, poly.get(index0)) > 0) { ordered[1] = poly.get(index1); ordered[2] = poly.get(index0); } else { ordered[1] = poly.get(index0); ordered[2] = poly.get(index1); } if (sign(general, poly.get(index2)) > 0) { ordered[3] = poly.get(index2); ordered[0] = poly.get(index3); } else { ordered[3] = poly.get(index3); ordered[0] = poly.get(index2); } } else { if (sign(general, poly.get(index0)) > 0) { ordered[2] = poly.get(index1); ordered[3] = poly.get(index0); } else { ordered[2] = poly.get(index0); ordered[3] = poly.get(index1); } if (sign(general, poly.get(index2)) > 0) { ordered[0] = poly.get(index2); ordered[1] = poly.get(index3); } else { ordered[0] = poly.get(index3); ordered[1] = poly.get(index2); } } } }
public class class_name { protected void orderNode(SquareNode target, SquareNode node, boolean pointingX) { int index0 = findIntersection(target,node); int index1 = (index0+1)%4; int index2 = (index0+2)%4; int index3 = (index0+3)%4; if( index0 < 0 ) throw new RuntimeException("Couldn't find intersection. Probable bug"); lineCenters.a = target.center; lineCenters.b = node.center; UtilLine2D_F64.convert(lineCenters,general); Polygon2D_F64 poly = target.square; if( pointingX ) { if (sign(general, poly.get(index0)) > 0) { ordered[1] = poly.get(index1); // depends on control dependency: [if], data = [none] ordered[2] = poly.get(index0); // depends on control dependency: [if], data = [0)] } else { ordered[1] = poly.get(index0); // depends on control dependency: [if], data = [0)] ordered[2] = poly.get(index1); // depends on control dependency: [if], data = [none] } if (sign(general, poly.get(index2)) > 0) { ordered[3] = poly.get(index2); // depends on control dependency: [if], data = [none] ordered[0] = poly.get(index3); // depends on control dependency: [if], data = [none] } else { ordered[3] = poly.get(index3); // depends on control dependency: [if], data = [none] ordered[0] = poly.get(index2); // depends on control dependency: [if], data = [none] } } else { if (sign(general, poly.get(index0)) > 0) { ordered[2] = poly.get(index1); // depends on control dependency: [if], data = [none] ordered[3] = poly.get(index0); // depends on control dependency: [if], data = [0)] } else { ordered[2] = poly.get(index0); // depends on control dependency: [if], data = [0)] ordered[3] = poly.get(index1); // depends on control dependency: [if], data = [none] } if (sign(general, poly.get(index2)) > 0) { ordered[0] = poly.get(index2); // depends on control dependency: [if], data = [none] ordered[1] = poly.get(index3); // depends on control dependency: [if], data = [none] } else { ordered[0] = poly.get(index3); // depends on control dependency: [if], data = [none] ordered[1] = 
poly.get(index2); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public void marshall(TransitionState transitionState, ProtocolMarshaller protocolMarshaller) { if (transitionState == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(transitionState.getEnabled(), ENABLED_BINDING); protocolMarshaller.marshall(transitionState.getLastChangedBy(), LASTCHANGEDBY_BINDING); protocolMarshaller.marshall(transitionState.getLastChangedAt(), LASTCHANGEDAT_BINDING); protocolMarshaller.marshall(transitionState.getDisabledReason(), DISABLEDREASON_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(TransitionState transitionState, ProtocolMarshaller protocolMarshaller) { if (transitionState == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(transitionState.getEnabled(), ENABLED_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(transitionState.getLastChangedBy(), LASTCHANGEDBY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(transitionState.getLastChangedAt(), LASTCHANGEDAT_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(transitionState.getDisabledReason(), DISABLEDREASON_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void setEcsServices(java.util.Collection<ECSService> ecsServices) { if (ecsServices == null) { this.ecsServices = null; return; } this.ecsServices = new com.amazonaws.internal.SdkInternalList<ECSService>(ecsServices); } }
public class class_name { public void setEcsServices(java.util.Collection<ECSService> ecsServices) { if (ecsServices == null) { this.ecsServices = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.ecsServices = new com.amazonaws.internal.SdkInternalList<ECSService>(ecsServices); } }
public class class_name { private void updateEligibility() { checkState(Thread.holdsLock(root), "Must hold lock to update eligibility"); synchronized (root) { if (!parent.isPresent()) { return; } if (isEligibleToStartNext()) { parent.get().addOrUpdateSubGroup(this); } else { parent.get().eligibleSubGroups.remove(this); lastStartMillis = 0; } parent.get().updateEligibility(); } } }
public class class_name { private void updateEligibility() { checkState(Thread.holdsLock(root), "Must hold lock to update eligibility"); synchronized (root) { if (!parent.isPresent()) { return; // depends on control dependency: [if], data = [none] } if (isEligibleToStartNext()) { parent.get().addOrUpdateSubGroup(this); // depends on control dependency: [if], data = [none] } else { parent.get().eligibleSubGroups.remove(this); // depends on control dependency: [if], data = [none] lastStartMillis = 0; // depends on control dependency: [if], data = [none] } parent.get().updateEligibility(); } } }
public class class_name { protected String getTitle(final String input, final char startDelim) { if (isStringNullOrEmpty(input)) { return null; } else { return ProcessorUtilities.cleanXMLCharacterReferences(StringUtilities.split(input, startDelim)[0].trim()); } } }
public class class_name { protected String getTitle(final String input, final char startDelim) { if (isStringNullOrEmpty(input)) { return null; // depends on control dependency: [if], data = [none] } else { return ProcessorUtilities.cleanXMLCharacterReferences(StringUtilities.split(input, startDelim)[0].trim()); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override protected UserTransaction locateUserTransaction() { try { return TransactionManagerLocator.INSTANCE.getUserTransaction(); } catch (Exception e) { throw new HibernateException(e); } } }
public class class_name { @Override protected UserTransaction locateUserTransaction() { try { return TransactionManagerLocator.INSTANCE.getUserTransaction(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new HibernateException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { protected void start() throws JMSException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "start"); // We call start regardless of whether this is synchronous or async if (coreConsumerSession != null) { try { synchronized (closedLock) { if (!closed) { // Do not deliver message immediately on start. coreConsumerSession.start(false); } else { // This condition could be caused by tight looping closing of consumers at // the same time as calling connection.start. This is a less intrusive solution // than adding the locking that would otherwise be required. if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Avoided starting a consumer that has been closed."); } } } catch (SIException sice) { // No FFDC code needed // d222942 review. Default message ok. // This exception will be propogated back up to the Session, which will // cache the first exception thrown. Once all the consumers have been // started (or stopped) the first caught exception is then propogated // to the Connection for delivery to the application. // d238447 FFDC Review. Either external or already FFDCd, so don't FFDC here. throw (JMSException) JmsErrorUtils.newThrowable(JMSException.class, "EXCEPTION_RECEIVED_CWSIA0085", new Object[] { sice, "JmsMsgConsumerImpl.start" }, sice, null, // null probeId = no FFDC this, tc); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "start"); } }
public class class_name { protected void start() throws JMSException { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(this, tc, "start"); // We call start regardless of whether this is synchronous or async if (coreConsumerSession != null) { try { synchronized (closedLock) { if (!closed) { // Do not deliver message immediately on start. coreConsumerSession.start(false); // depends on control dependency: [if], data = [none] } else { // This condition could be caused by tight looping closing of consumers at // the same time as calling connection.start. This is a less intrusive solution // than adding the locking that would otherwise be required. if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) SibTr.debug(this, tc, "Avoided starting a consumer that has been closed."); } } } catch (SIException sice) { // No FFDC code needed // d222942 review. Default message ok. // This exception will be propogated back up to the Session, which will // cache the first exception thrown. Once all the consumers have been // started (or stopped) the first caught exception is then propogated // to the Connection for delivery to the application. // d238447 FFDC Review. Either external or already FFDCd, so don't FFDC here. throw (JMSException) JmsErrorUtils.newThrowable(JMSException.class, "EXCEPTION_RECEIVED_CWSIA0085", new Object[] { sice, "JmsMsgConsumerImpl.start" }, sice, null, // null probeId = no FFDC this, tc); } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(this, tc, "start"); } }
public class class_name { protected static GraphicsConfiguration getDefGC () { if (_gc == null) { // obtain information on our graphics environment try { GraphicsEnvironment env = GraphicsEnvironment.getLocalGraphicsEnvironment(); GraphicsDevice gd = env.getDefaultScreenDevice(); _gc = gd.getDefaultConfiguration(); } catch (HeadlessException e) { // no problem, just return null } } return _gc; } }
public class class_name { protected static GraphicsConfiguration getDefGC () { if (_gc == null) { // obtain information on our graphics environment try { GraphicsEnvironment env = GraphicsEnvironment.getLocalGraphicsEnvironment(); GraphicsDevice gd = env.getDefaultScreenDevice(); _gc = gd.getDefaultConfiguration(); // depends on control dependency: [try], data = [none] } catch (HeadlessException e) { // no problem, just return null } // depends on control dependency: [catch], data = [none] } return _gc; } }
public class class_name { private void handleChannelCallback(final String message) { // Channel callback updateConnectionHeartbeat(); // JSON callback final JSONArray jsonArray = new JSONArray(new JSONTokener(message)); if(connectionFeatureManager.isConnectionFeatureActive(BitfinexConnectionFeature.SEQ_ALL)) { sequenceNumberAuditor.auditPackage(jsonArray); } final int channel = jsonArray.getInt(0); final ChannelCallbackHandler channelCallbackHandler = channelIdToHandlerMap.get(channel); if (channelCallbackHandler == null) { logger.error("Unable to determine symbol for channel {} / data is {} ", channel, jsonArray); reconnect(); return; } String action = null; final JSONArray payload; if (jsonArray.get(1) instanceof String) { action = jsonArray.getString(1); payload = jsonArray.optJSONArray(2); } else { payload = jsonArray.optJSONArray(1); } if (Objects.equals(action, "hb")) { quoteManager.updateChannelHeartbeat(channelCallbackHandler.getSymbol()); } try { if (payload == null) { return; } channelCallbackHandler.handleChannelData(action, payload); } catch (final BitfinexClientException e) { logger.error("Got exception while handling callback", e); } } }
public class class_name {
    /**
     * Dispatches one raw websocket channel message to the registered
     * {@code ChannelCallbackHandler} for its channel id. A message for an
     * unknown channel forces a reconnect; "hb" frames refresh the
     * per-channel heartbeat.
     */
    private void handleChannelCallback(final String message) {
        // Channel callback
        updateConnectionHeartbeat();
        // JSON callback
        final JSONArray jsonArray = new JSONArray(new JSONTokener(message));
        if (connectionFeatureManager.isConnectionFeatureActive(BitfinexConnectionFeature.SEQ_ALL)) {
            sequenceNumberAuditor.auditPackage(jsonArray); // depends on control dependency: [if], data = [none]
        }
        final int channel = jsonArray.getInt(0);
        final ChannelCallbackHandler channelCallbackHandler = channelIdToHandlerMap.get(channel);
        if (channelCallbackHandler == null) {
            logger.error("Unable to determine symbol for channel {} / data is {} ", channel, jsonArray); // depends on control dependency: [if], data = [none]
            reconnect(); // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        // Element 1 is either an action string (payload at index 2) or the
        // payload array itself (payload at index 1).
        String action = null;
        final JSONArray payload;
        if (jsonArray.get(1) instanceof String) {
            action = jsonArray.getString(1); // depends on control dependency: [if], data = [none]
            payload = jsonArray.optJSONArray(2); // depends on control dependency: [if], data = [none]
        } else {
            payload = jsonArray.optJSONArray(1); // depends on control dependency: [if], data = [none]
        }
        if (Objects.equals(action, "hb")) {
            quoteManager.updateChannelHeartbeat(channelCallbackHandler.getSymbol()); // depends on control dependency: [if], data = [none]
        }
        try {
            if (payload == null) {
                return; // depends on control dependency: [if], data = [none]
            }
            channelCallbackHandler.handleChannelData(action, payload); // depends on control dependency: [try], data = [none]
        } catch (final BitfinexClientException e) {
            logger.error("Got exception while handling callback", e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Returns the page-relative Y coordinate of this event: the first touch
     * point when touches are present, otherwise the mouse position adjusted
     * by the document's vertical scroll offset.
     */
    public final int pageY() {
        // No touch data: fall back to the mouse coordinate plus scroll.
        if (getTouches() == null || getTouches().length() == 0) {
            return getClientY() + GQuery.document.getScrollTop();
        }
        return getTouches().get(0).getPageY();
    }
}
public class class_name {
    /**
     * Returns the page-relative Y coordinate of this event: the first touch
     * point's pageY when touch data exists, otherwise the client Y plus the
     * document's vertical scroll offset.
     */
    public final int pageY() {
        if (getTouches() != null && getTouches().length() > 0) {
            return getTouches().get(0).getPageY(); // depends on control dependency: [if], data = [none]
        } else {
            return getClientY() + GQuery.document.getScrollTop(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Container preInvoke for a singleton session bean with container-managed
     * concurrency: acquires the READ or WRITE lock dictated by the method's
     * {@code LockType} before the business method runs, honoring the
     * configured access timeout and recording lock-wait time in PMI.
     * <p>
     * READ-to-WRITE upgrades on the same thread are rejected with
     * {@code IllegalLoopbackException}. For persistent timer timeout
     * callbacks, a short preliminary tryLock is used to detect a deadlock
     * against the timer database (RTC126471).
     *
     * @param s  per-invocation support object; {@code s.ivLockAcquired} is
     *           set when a lock is held and {@code s.pmiCookie} is adjusted
     *           to exclude lock-wait time from method response time
     * @param tx current container transaction (not used here)
     * @return the bean instance to invoke
     * @throws RemoteException per the container collaborator contract
     */
    @Override
    public Object preInvoke(EJSDeployedSupport s, ContainerTx tx) throws RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "preInvoke : " + this);

        // If this is container managed concurrency control, then use
        // then get the lock specified by the lock type found in the
        // EJB method info object.
        s.ivLockAcquired = false; // d571981
        if (ivContainerManagedConcurrency) {
            // Get the lock type to use for the method being invoked.
            // and AccessTimeout value to use.
            EJBMethodInfoImpl mInfo = s.methodInfo;
            LockType lockType = mInfo.ivLockType;

            // Ensure we are not trying to upgrade from a READ to a WRITE lock. We
            // must throw an exception if trying to upgrade from READ to WRITE.
            if (lockType == LockType.WRITE && ivLock.isWriteLockedByCurrentThread() == false) {
                // Requesting write lock and write lock is not currently held by the
                // calling thread. So check whether calling thread holds any read locks.
                if (ivLock.getReadHoldCount() > 0) {
                    throw new IllegalLoopbackException("A loopback method call is not allowed to upgrade from a READ to a WRITE lock.");
                }
            }

            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(tc, "preInvoke attempting to acquire a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());

            // PMI startLockTime for read and write locks F743-9002
            long lockStartTime = 0;
            int lockStatType = 0;
            try {
                long timeout = mInfo.ivAccessTimeout;

                // For a persistent timeout callback that will wait a significant amount of
                // time for the lock, perform a quick lock attempt in an effort to detect a
                // deadlock that can occur between the singleton lock and the timer database
                // if there is a concurrent thread calling getTimers().  RTC126471
                if (s.isPersistentTimeoutGlobalTx && PersistentTimerSingletonDeadlockTimeout >= 0 && (timeout == -1 || timeout > PersistentTimerSingletonDeadlockTimeout)) {
                    if (lockType == LockType.READ) {
                        if (pmiBean != null) {
                            lockStatType = EJBPMICollaborator.READ_LOCK_TIME;
                            lockStartTime = pmiBean.initialTime(lockStatType);
                        }
                        s.ivLockAcquired = ivReadLock.tryLock(PersistentTimerSingletonDeadlockTimeout, TimeUnit.MILLISECONDS);
                    } else {
                        if (pmiBean != null) {
                            lockStatType = EJBPMICollaborator.WRITE_LOCK_TIME;
                            lockStartTime = pmiBean.initialTime(lockStatType);
                        }
                        s.ivLockAcquired = ivWriteLock.tryLock(PersistentTimerSingletonDeadlockTimeout, TimeUnit.MILLISECONDS);
                    }
                    if (s.ivLockAcquired) {
                        if (isTraceOn && tc.isDebugEnabled())
                            Tr.debug(tc, "preInvoke acquired a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());
                    } else {
                        // If the lock was not obtained and another thread is in getTimer,
                        // then a deadlock is very likely, so abort the timeout callback
                        // which will free up the database row lock.
                        if (ivInGetTimers.get() > 0) {
                            if (pmiBean != null) {
                                pmiBean.countCancelledLocks();
                            }
                            throw new ConcurrentAccessTimeoutException("preInvoke timed out in attempt to acquire a " + mInfo.ivLockType.name() + " lock for method signature = " + mInfo.getMethodSignature() + ". Dead lock detected with timer database.");
                        }

                        // Otherwise, subtract the time waited, and proceed normally
                        if (timeout != -1) {
                            timeout = Math.max(0, timeout - PersistentTimerSingletonDeadlockTimeout);
                        }
                    }
                }

                // If the lock is not acquired, then either this is not for a persistent
                // timeout callback, or there is not a concurrent thread in getTimers(),
                // so just attempt to obtain a lock using the configured access timeout.
                if (!s.ivLockAcquired) {
                    if (timeout == -1) // -1 means wait forever F743-21028.5
                    {
                        if (lockType == LockType.READ) {
                            if (pmiBean != null && lockStartTime == 0) // F743-9002
                            {
                                lockStatType = EJBPMICollaborator.READ_LOCK_TIME;
                                lockStartTime = pmiBean.initialTime(lockStatType);
                            }
                            ivReadLock.lock(); // d571981
                        } else {
                            if (pmiBean != null && lockStartTime == 0)// F743-9002
                            {
                                lockStatType = EJBPMICollaborator.WRITE_LOCK_TIME;
                                lockStartTime = pmiBean.initialTime(lockStatType);
                            }
                            ivWriteLock.lock(); // d571981
                        }
                        s.ivLockAcquired = true;
                        if (isTraceOn && tc.isDebugEnabled())
                            Tr.debug(tc, "preInvoke acquired a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());
                    } else {
                        if (lockType == LockType.READ) {
                            if (pmiBean != null && lockStartTime == 0) // F743-9002
                            {
                                lockStatType = EJBPMICollaborator.READ_LOCK_TIME;
                                lockStartTime = pmiBean.initialTime(lockStatType);
                            }
                            s.ivLockAcquired = ivReadLock.tryLock(timeout, TimeUnit.MILLISECONDS); // d571981
                        } else {
                            if (pmiBean != null && lockStartTime == 0) // F743-9002
                            {
                                lockStatType = EJBPMICollaborator.WRITE_LOCK_TIME;
                                lockStartTime = pmiBean.initialTime(lockStatType);
                            }
                            s.ivLockAcquired = ivWriteLock.tryLock(timeout, TimeUnit.MILLISECONDS); // d571981
                        }
                        if (s.ivLockAcquired) {
                            if (isTraceOn && tc.isDebugEnabled())
                                Tr.debug(tc, "preInvoke acquired a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());
                        } else {
                            if (pmiBean != null) // F743-9002
                            {
                                pmiBean.countCancelledLocks();
                            }
                            throw new ConcurrentAccessTimeoutException("preInvoke timed out in attempt to acquire a " + mInfo.ivLockType.name() + " lock for method signature = " + mInfo.getMethodSignature() + ". Access timeout value = " + timeout + " milli-seconds");
                        }
                    }
                }
            } catch (InterruptedException e) {
                // NOTE(review): the thread's interrupt status is not restored here
                // (Thread.currentThread().interrupt()) — confirm this is intentional.
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "InterruptedException prevented lock from being acquired.");
                if (pmiBean != null) // F743-9002
                {
                    pmiBean.countCancelledLocks();
                }
                throw ExceptionUtil.EJBException("InterruptedException prevented lock from being acquired.", e);
            } finally {
                // F743-9002
                // Calculate the time to obtain the lock and adjust the pmiCookie
                // used for methodRT PMI counter to exclude the lock time.
                if (pmiBean != null) { // d648142.2
                    long lockDuration = pmiBean.finalTime(lockStatType, lockStartTime);
                    if (lockDuration > 0) {
                        s.pmiCookie += lockDuration; // d724734
                    }
                }
            }
        }

        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "preInvoke");

        return ivEjbInstance;
    }
}
public class class_name {
    /**
     * Container preInvoke for a singleton session bean with container-managed
     * concurrency: acquires the READ or WRITE lock dictated by the method's
     * {@code LockType} before the business method runs, honoring the
     * configured access timeout and recording lock-wait time in PMI.
     * READ-to-WRITE upgrades on the same thread are rejected; persistent
     * timer callbacks first perform a short tryLock to detect a deadlock
     * against the timer database (RTC126471).
     *
     * @param s  per-invocation support object; {@code s.ivLockAcquired} and
     *           {@code s.pmiCookie} are updated as side effects
     * @param tx current container transaction (not used here)
     * @return the bean instance to invoke
     * @throws RemoteException per the container collaborator contract
     */
    @Override
    public Object preInvoke(EJSDeployedSupport s, ContainerTx tx) throws RemoteException {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "preInvoke : " + this);

        // If this is container managed concurrency control, then use
        // then get the lock specified by the lock type found in the
        // EJB method info object.
        s.ivLockAcquired = false; // d571981
        if (ivContainerManagedConcurrency) {
            // Get the lock type to use for the method being invoked.
            // and AccessTimeout value to use.
            EJBMethodInfoImpl mInfo = s.methodInfo;
            LockType lockType = mInfo.ivLockType;

            // Ensure we are not trying to upgrade from a READ to a WRITE lock. We
            // must throw an exception if trying to upgrade from READ to WRITE.
            if (lockType == LockType.WRITE && ivLock.isWriteLockedByCurrentThread() == false) {
                // Requesting write lock and write lock is not currently held by the
                // calling thread. So check whether calling thread holds any read locks.
                if (ivLock.getReadHoldCount() > 0) {
                    throw new IllegalLoopbackException("A loopback method call is not allowed to upgrade from a READ to a WRITE lock.");
                }
            }

            if (isTraceOn && tc.isDebugEnabled())
                Tr.debug(tc, "preInvoke attempting to acquire a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());

            // PMI startLockTime for read and write locks F743-9002
            long lockStartTime = 0;
            int lockStatType = 0;
            try {
                long timeout = mInfo.ivAccessTimeout;

                // For a persistent timeout callback that will wait a significant amount of
                // time for the lock, perform a quick lock attempt in an effort to detect a
                // deadlock that can occur between the singleton lock and the timer database
                // if there is a concurrent thread calling getTimers().  RTC126471
                if (s.isPersistentTimeoutGlobalTx && PersistentTimerSingletonDeadlockTimeout >= 0 && (timeout == -1 || timeout > PersistentTimerSingletonDeadlockTimeout)) {
                    if (lockType == LockType.READ) {
                        if (pmiBean != null) {
                            lockStatType = EJBPMICollaborator.READ_LOCK_TIME; // depends on control dependency: [if], data = [none]
                            lockStartTime = pmiBean.initialTime(lockStatType); // depends on control dependency: [if], data = [none]
                        }
                        s.ivLockAcquired = ivReadLock.tryLock(PersistentTimerSingletonDeadlockTimeout, TimeUnit.MILLISECONDS); // depends on control dependency: [if], data = [none]
                    } else {
                        if (pmiBean != null) {
                            lockStatType = EJBPMICollaborator.WRITE_LOCK_TIME; // depends on control dependency: [if], data = [none]
                            lockStartTime = pmiBean.initialTime(lockStatType); // depends on control dependency: [if], data = [none]
                        }
                        s.ivLockAcquired = ivWriteLock.tryLock(PersistentTimerSingletonDeadlockTimeout, TimeUnit.MILLISECONDS); // depends on control dependency: [if], data = [none]
                    }
                    if (s.ivLockAcquired) {
                        if (isTraceOn && tc.isDebugEnabled())
                            Tr.debug(tc, "preInvoke acquired a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());
                    } else {
                        // If the lock was not obtained and another thread is in getTimer,
                        // then a deadlock is very likely, so abort the timeout callback
                        // which will free up the database row lock.
                        if (ivInGetTimers.get() > 0) {
                            if (pmiBean != null) {
                                pmiBean.countCancelledLocks(); // depends on control dependency: [if], data = [none]
                            }
                            throw new ConcurrentAccessTimeoutException("preInvoke timed out in attempt to acquire a " + mInfo.ivLockType.name() + " lock for method signature = " + mInfo.getMethodSignature() + ". Dead lock detected with timer database.");
                        }

                        // Otherwise, subtract the time waited, and proceed normally
                        if (timeout != -1) {
                            timeout = Math.max(0, timeout - PersistentTimerSingletonDeadlockTimeout); // depends on control dependency: [if], data = [none]
                        }
                    }
                }

                // If the lock is not acquired, then either this is not for a persistent
                // timeout callback, or there is not a concurrent thread in getTimers(),
                // so just attempt to obtain a lock using the configured access timeout.
                if (!s.ivLockAcquired) {
                    if (timeout == -1) // -1 means wait forever F743-21028.5
                    {
                        if (lockType == LockType.READ) {
                            if (pmiBean != null && lockStartTime == 0) // F743-9002
                            {
                                lockStatType = EJBPMICollaborator.READ_LOCK_TIME; // depends on control dependency: [if], data = [none]
                                lockStartTime = pmiBean.initialTime(lockStatType); // depends on control dependency: [if], data = [none]
                            }
                            ivReadLock.lock(); // d571981 // depends on control dependency: [if], data = [none]
                        } else {
                            if (pmiBean != null && lockStartTime == 0)// F743-9002
                            {
                                lockStatType = EJBPMICollaborator.WRITE_LOCK_TIME; // depends on control dependency: [if], data = [none]
                                lockStartTime = pmiBean.initialTime(lockStatType); // depends on control dependency: [if], data = [none]
                            }
                            ivWriteLock.lock(); // d571981 // depends on control dependency: [if], data = [none]
                        }
                        s.ivLockAcquired = true; // depends on control dependency: [if], data = [none]
                        if (isTraceOn && tc.isDebugEnabled())
                            Tr.debug(tc, "preInvoke acquired a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());
                    } else {
                        if (lockType == LockType.READ) {
                            if (pmiBean != null && lockStartTime == 0) // F743-9002
                            {
                                lockStatType = EJBPMICollaborator.READ_LOCK_TIME; // depends on control dependency: [if], data = [none]
                                lockStartTime = pmiBean.initialTime(lockStatType); // depends on control dependency: [if], data = [none]
                            }
                            s.ivLockAcquired = ivReadLock.tryLock(timeout, TimeUnit.MILLISECONDS); // d571981 // depends on control dependency: [if], data = [none]
                        } else {
                            if (pmiBean != null && lockStartTime == 0) // F743-9002
                            {
                                lockStatType = EJBPMICollaborator.WRITE_LOCK_TIME; // depends on control dependency: [if], data = [none]
                                lockStartTime = pmiBean.initialTime(lockStatType); // depends on control dependency: [if], data = [none]
                            }
                            s.ivLockAcquired = ivWriteLock.tryLock(timeout, TimeUnit.MILLISECONDS); // d571981 // depends on control dependency: [if], data = [none]
                        }
                        if (s.ivLockAcquired) {
                            if (isTraceOn && tc.isDebugEnabled())
                                Tr.debug(tc, "preInvoke acquired a " + mInfo.ivLockType.name() + " lock. " + ivLock.toString());
                        } else {
                            if (pmiBean != null) // F743-9002
                            {
                                pmiBean.countCancelledLocks(); // depends on control dependency: [if], data = [none]
                            }
                            throw new ConcurrentAccessTimeoutException("preInvoke timed out in attempt to acquire a " + mInfo.ivLockType.name() + " lock for method signature = " + mInfo.getMethodSignature() + ". Access timeout value = " + timeout + " milli-seconds");
                        }
                    }
                }
            } catch (InterruptedException e) {
                // NOTE(review): the thread's interrupt status is not restored here
                // (Thread.currentThread().interrupt()) — confirm this is intentional.
                if (isTraceOn && tc.isDebugEnabled())
                    Tr.debug(tc, "InterruptedException prevented lock from being acquired.");
                if (pmiBean != null) // F743-9002
                {
                    pmiBean.countCancelledLocks(); // depends on control dependency: [if], data = [none]
                }
                throw ExceptionUtil.EJBException("InterruptedException prevented lock from being acquired.", e);
            } finally { // depends on control dependency: [catch], data = [none]
                // F743-9002
                // Calculate the time to obtain the lock and adjust the pmiCookie
                // used for methodRT PMI counter to exclude the lock time.
                if (pmiBean != null) { // d648142.2
                    long lockDuration = pmiBean.finalTime(lockStatType, lockStartTime);
                    if (lockDuration > 0) {
                        s.pmiCookie += lockDuration; // d724734 // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }

        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "preInvoke");

        return ivEjbInstance;
    }
}
public class class_name { private static <C extends Component<C, I>, I extends IInterface> void bindToService(final Context context, final Descriptor<C, I> descriptor, final ConnectionCallback<C> callback) { Intent intent = new Intent(); intent.setComponent(SPFInfo.getSPFServiceComponentName()); intent.setAction(descriptor.getActionName()); ServiceConnection connection = new ServiceConnection() { @Override public void onServiceConnected(ComponentName name, IBinder binder) { I service = descriptor.castInterface(binder); C instance = descriptor.createInstance(context, service, this, callback); callback.onServiceReady(instance); } @Override public void onServiceDisconnected(ComponentName name) { callback.onDisconnect(); } }; if (!context.bindService(intent, connection, Context.BIND_AUTO_CREATE)) { callback.onError(new SPFError(SPFError.SPF_NOT_INSTALLED_ERROR_CODE)); } } }
public class class_name {
    /**
     * Binds to the SPF service using the action name of the requested
     * component. On connection the raw binder is cast and wrapped into the
     * typed component and delivered via {@code callback.onServiceReady};
     * a failed bind reports {@code SPF_NOT_INSTALLED_ERROR_CODE}.
     */
    private static <C extends Component<C, I>, I extends IInterface> void bindToService(final Context context,
            final Descriptor<C, I> descriptor, final ConnectionCallback<C> callback) {
        Intent intent = new Intent();
        intent.setComponent(SPFInfo.getSPFServiceComponentName());
        intent.setAction(descriptor.getActionName());
        ServiceConnection connection = new ServiceConnection() {
            @Override
            public void onServiceConnected(ComponentName name, IBinder binder) {
                I service = descriptor.castInterface(binder);
                C instance = descriptor.createInstance(context, service, this, callback);
                callback.onServiceReady(instance);
            }

            @Override
            public void onServiceDisconnected(ComponentName name) {
                callback.onDisconnect();
            }
        };
        if (!context.bindService(intent, connection, Context.BIND_AUTO_CREATE)) {
            callback.onError(new SPFError(SPFError.SPF_NOT_INSTALLED_ERROR_CODE)); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name { @Action(name = "Update Property Value", outputs = { @Output(RETURN_CODE), @Output(RETURN_RESULT), @Output(EXCEPTION), @Output(STDERR) }, responses = { @Response(text = ResponseNames.SUCCESS, field = RETURN_CODE, value = SUCCESS, matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.RESOLVED), @Response(text = ResponseNames.FAILURE, field = RETURN_CODE, value = FAILURE, matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.ERROR, isOnFail = true) }) public Map<String, String> execute( @Param(value = FILE_PATH, required = true) String installationPath, @Param(value = LISTEN_ADDRESSES, required = true) String listenAddresses, @Param(value = PORT) String port, @Param(value = SSL) String ssl, @Param(value = SSL_CA_FILE) String sslCaFile, @Param(value = SSL_CERT_FILE) String sslCertFile, @Param(value = SSL_KEY_FILE) String sslKeyFile, @Param(value = MAX_CONNECTIONS) String maxConnections, @Param(value = SHARED_BUFFERS) String sharedBuffers, @Param(value = EFFECTIVE_CACHE_SIZE) String effectiveCacheSize, @Param(value = AUTOVACUUM) String autovacuum, @Param(value = WORK_MEM) String workMem ) { try { Map<String, Object> keyValues = ConfigService.validateAndBuildKeyValuesMap( listenAddresses, port, ssl, sslCaFile, sslCertFile, sslKeyFile, maxConnections, sharedBuffers, effectiveCacheSize, autovacuum, workMem); ConfigService.changeProperty(installationPath, keyValues); return getSuccessResultsMap("Updated postgresql.conf successfully"); } catch (Exception e) { return getFailureResultsMap("Failed to update postgresql.conf", e); } } }
public class class_name {
    /**
     * Validates the supplied PostgreSQL settings and applies them to the
     * postgresql.conf file under the given installation path. Returns a
     * success results map, or a failure results map carrying the exception.
     */
    @Action(name = "Update Property Value", outputs = { @Output(RETURN_CODE), @Output(RETURN_RESULT), @Output(EXCEPTION), @Output(STDERR) }, responses = { @Response(text = ResponseNames.SUCCESS, field = RETURN_CODE, value = SUCCESS, matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.RESOLVED), @Response(text = ResponseNames.FAILURE, field = RETURN_CODE, value = FAILURE, matchType = MatchType.COMPARE_EQUAL, responseType = ResponseType.ERROR, isOnFail = true) })
    public Map<String, String> execute(
            @Param(value = FILE_PATH, required = true) String installationPath,
            @Param(value = LISTEN_ADDRESSES, required = true) String listenAddresses,
            @Param(value = PORT) String port,
            @Param(value = SSL) String ssl,
            @Param(value = SSL_CA_FILE) String sslCaFile,
            @Param(value = SSL_CERT_FILE) String sslCertFile,
            @Param(value = SSL_KEY_FILE) String sslKeyFile,
            @Param(value = MAX_CONNECTIONS) String maxConnections,
            @Param(value = SHARED_BUFFERS) String sharedBuffers,
            @Param(value = EFFECTIVE_CACHE_SIZE) String effectiveCacheSize,
            @Param(value = AUTOVACUUM) String autovacuum,
            @Param(value = WORK_MEM) String workMem
    ) {
        try {
            Map<String, Object> keyValues = ConfigService.validateAndBuildKeyValuesMap(
                    listenAddresses, port, ssl, sslCaFile, sslCertFile, sslKeyFile,
                    maxConnections, sharedBuffers, effectiveCacheSize, autovacuum, workMem);
            ConfigService.changeProperty(installationPath, keyValues); // depends on control dependency: [try], data = [none]
            return getSuccessResultsMap("Updated postgresql.conf successfully"); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            return getFailureResultsMap("Failed to update postgresql.conf", e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Truncates the file at the given path to zero length by opening it for
     * writing and writing nothing.
     *
     * @param path path of the file to empty; the file is created if absent
     * @throws RuntimeException if the file cannot be opened for writing
     */
    protected void cleanFile(String path) {
        // try-with-resources guarantees the writer is closed even if an
        // exception occurs after opening (the original leaked the handle
        // in that case).
        try (PrintWriter writer = new PrintWriter(path)) {
            writer.print("");
        } catch (FileNotFoundException e) {
            throw new RuntimeException("An error occurred while cleaning the file: " + e.getMessage(), e);
        }
    }
}
public class class_name {
    /**
     * Truncates the file at the given path to zero length by opening it for
     * writing, writing nothing, and closing it.
     *
     * @param path path of the file to empty; created if absent
     * @throws RuntimeException if the file cannot be opened for writing
     */
    protected void cleanFile(String path) {
        try {
            PrintWriter writer;
            // NOTE(review): the writer is not closed in a finally block /
            // try-with-resources, so a throw between open and close would
            // leak the handle — consider try-with-resources.
            writer = new PrintWriter(path); // depends on control dependency: [try], data = [none]
            writer.print(""); // depends on control dependency: [try], data = [none]
            writer.close(); // depends on control dependency: [try], data = [none]
        } catch (FileNotFoundException e) {
            throw new RuntimeException("An error occurred while cleaning the file: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Returns the element symbol of the atom shared by the two bonds, or an
     * empty string when they have no atom in common.
     */
    private static String getCommonSymbol(IBond bondA, IBond bondB) {
        // Check both endpoints of bondB for membership in bondA.
        if (bondA.contains(bondB.getBegin())) {
            return bondB.getBegin().getSymbol();
        }
        if (bondA.contains(bondB.getEnd())) {
            return bondB.getEnd().getSymbol();
        }
        return "";
    }
}
public class class_name {
    /**
     * Returns the element symbol of the atom shared by the two bonds, or an
     * empty string when neither endpoint of {@code bondB} belongs to
     * {@code bondA}.
     */
    private static String getCommonSymbol(IBond bondA, IBond bondB) {
        String symbol = "";
        if (bondA.contains(bondB.getBegin())) {
            symbol = bondB.getBegin().getSymbol(); // depends on control dependency: [if], data = [none]
        } else if (bondA.contains(bondB.getEnd())) {
            symbol = bondB.getEnd().getSymbol(); // depends on control dependency: [if], data = [none]
        }
        return symbol;
    }
}
public class class_name {
    /**
     * Refines the current partition of states against the given splitter
     * block — one round of a partition-refinement (bisimulation/minimization)
     * step. States with an edge into {@code splitter} are grouped by their
     * signature (the ordered set of transition labels leading into the
     * splitter) via a weak sort, and each block is divided into sub-blocks
     * at signature boundaries. Sub-blocks are only created here; they are
     * committed later (see the step-4 note at the end).
     *
     * @param splitter the block whose incoming edges drive the refinement
     */
    private void split(Block<S, L> splitter) {
        // STEP 1: Collect the states that have outgoing edges
        // pointing to states inside the currently considered blocks.
        // Also, a list of transition labels occuring on these
        // edges is created.
        for (State<S, L> state : splitter.getStates()) {
            for (Edge<S, L> edge : state.getIncoming()) {
                TransitionLabel<S, L> transition = edge.getTransitionLabel();
                State<S, L> newState = edge.getSource();
                // Blocks that only contain a single state cannot
                // be split any further, and thus are of no
                // interest.
                if (newState.isSingletonBlock()) {
                    continue; //continue;
                }
                if (transition.addToSet(newState)) {
                    letterList.add(transition);
                }
            }
        }
        // STEP 2: Build the signatures. A signature of a state
        // is a sequence of the transition labels of its outgoing
        // edge that point into the considered split block.
        // The iteration over the label list in the outer loop
        // guarantees a consistent ordering of the transition labels.
        for (TransitionLabel<S, L> letter : letterList) {
            for (State<S, L> state : letter.getSet()) {
                if (state.addToSignature(letter)) {
                    stateList.add(state);
                    state.setSplitPoint(false);
                }
            }
            letter.clearSet();
        }
        letterList.clear();
        // STEP 3: Discriminate the states. This is done by weak
        // sorting the states. At the end of the weak sort, the finalList
        // will contain the states in such an order that only states belonging
        // to the same block having the same signature will be contiguous.
        // First, initialize the buckets of each block. This is done
        // for grouping the states by their corresponding block.
        for (State<S, L> state : stateList) {
            Block<S, L> block = state.getBlock();
            if (block.addToBucket(state)) {
                splitBlocks.add(block);
            }
        }
        // NOTE(review): splitBlocks is not cleared in this method —
        // presumably consumed/reset by updateBlocks(); confirm.
        stateList.clear();
        for (Block<S, L> block : splitBlocks) {
            stateList.concat(block.getBucket());
        }
        // Now, the states are ordered according to their signatures
        int i = 0;
        while (!stateList.isEmpty()) {
            for (State<S, L> state : stateList) {
                TransitionLabel<S, L> letter = state.getSignatureLetter(i);
                if (letter == null) {
                    finalList.pushBack(state);
                } else if (letter.addToBucket(state)) {
                    letterList.add(letter);
                }
                // If this state was the first to be added to the respective
                // bucket, or it differs from the previous entry in the previous
                // letter, it is a split point.
                if (state.getPrev() == null) {
                    state.setSplitPoint(true);
                } else if (i > 0 && state.getPrev().getSignatureLetter(i - 1) != state.getSignatureLetter(i - 1)) {
                    state.setSplitPoint(true);
                }
            }
            stateList.clear();
            for (TransitionLabel<S, L> letter : letterList) {
                stateList.concat(letter.getBucket());
            }
            letterList.clear();
            i++;
        }
        // Walk the weakly sorted states and open a new sub-block at each
        // block boundary or marked split point.
        Block<S, L> prevBlock = null;
        State<S, L> prev = null;
        for (State<S, L> state : finalList) {
            Block<S, L> currBlock = state.getBlock();
            if (currBlock != prevBlock) {
                currBlock.createSubBlock();
                prevBlock = currBlock;
            } else if (state.isSplitPoint()) {
                currBlock.createSubBlock();
            }
            currBlock.addToSubBlock(state);
            if (prev != null) {
                prev.reset();
            }
            prev = state;
        }
        if (prev != null) {
            prev.reset();
        }
        finalList.clear();
        // Step 4 of the algorithm is done in the method
        // updateBlocks()
    }
}
public class class_name {
    /**
     * Refines the current partition of states against the given splitter
     * block — one round of a partition-refinement (bisimulation/minimization)
     * step. States reaching the splitter are grouped by signature via a weak
     * sort and each block is divided into sub-blocks at signature boundaries;
     * the sub-blocks are committed later by {@code updateBlocks()}.
     *
     * @param splitter the block whose incoming edges drive the refinement
     */
    private void split(Block<S, L> splitter) {
        // STEP 1: Collect the states that have outgoing edges
        // pointing to states inside the currently considered blocks.
        // Also, a list of transition labels occuring on these
        // edges is created.
        for (State<S, L> state : splitter.getStates()) {
            for (Edge<S, L> edge : state.getIncoming()) {
                TransitionLabel<S, L> transition = edge.getTransitionLabel();
                State<S, L> newState = edge.getSource();
                // Blocks that only contain a single state cannot
                // be split any further, and thus are of no
                // interest.
                if (newState.isSingletonBlock()) {
                    continue; //continue;
                }
                if (transition.addToSet(newState)) {
                    letterList.add(transition); // depends on control dependency: [if], data = [none]
                }
            }
        }
        // STEP 2: Build the signatures. A signature of a state
        // is a sequence of the transition labels of its outgoing
        // edge that point into the considered split block.
        // The iteration over the label list in the outer loop
        // guarantees a consistent ordering of the transition labels.
        for (TransitionLabel<S, L> letter : letterList) {
            for (State<S, L> state : letter.getSet()) {
                if (state.addToSignature(letter)) {
                    stateList.add(state); // depends on control dependency: [if], data = [none]
                    state.setSplitPoint(false); // depends on control dependency: [if], data = [none]
                }
            }
            letter.clearSet(); // depends on control dependency: [for], data = [letter]
        }
        letterList.clear();
        // STEP 3: Discriminate the states. This is done by weak
        // sorting the states. At the end of the weak sort, the finalList
        // will contain the states in such an order that only states belonging
        // to the same block having the same signature will be contiguous.
        // First, initialize the buckets of each block. This is done
        // for grouping the states by their corresponding block.
        for (State<S, L> state : stateList) {
            Block<S, L> block = state.getBlock();
            if (block.addToBucket(state)) {
                splitBlocks.add(block); // depends on control dependency: [if], data = [none]
            }
        }
        stateList.clear();
        for (Block<S, L> block : splitBlocks) {
            stateList.concat(block.getBucket()); // depends on control dependency: [for], data = [block]
        }
        // Now, the states are ordered according to their signatures
        int i = 0;
        while (!stateList.isEmpty()) {
            for (State<S, L> state : stateList) {
                TransitionLabel<S, L> letter = state.getSignatureLetter(i);
                if (letter == null) {
                    finalList.pushBack(state); // depends on control dependency: [if], data = [none]
                } else if (letter.addToBucket(state)) {
                    letterList.add(letter); // depends on control dependency: [if], data = [none]
                }
                // If this state was the first to be added to the respective
                // bucket, or it differs from the previous entry in the previous
                // letter, it is a split point.
                if (state.getPrev() == null) {
                    state.setSplitPoint(true); // depends on control dependency: [if], data = [none]
                } else if (i > 0 && state.getPrev().getSignatureLetter(i - 1) != state.getSignatureLetter(i - 1)) {
                    state.setSplitPoint(true); // depends on control dependency: [if], data = [none]
                }
            }
            stateList.clear(); // depends on control dependency: [while], data = [none]
            for (TransitionLabel<S, L> letter : letterList) {
                stateList.concat(letter.getBucket()); // depends on control dependency: [for], data = [letter]
            }
            letterList.clear(); // depends on control dependency: [while], data = [none]
            i++; // depends on control dependency: [while], data = [none]
        }
        // Walk the weakly sorted states and open a new sub-block at each
        // block boundary or marked split point.
        Block<S, L> prevBlock = null;
        State<S, L> prev = null;
        for (State<S, L> state : finalList) {
            Block<S, L> currBlock = state.getBlock();
            if (currBlock != prevBlock) {
                currBlock.createSubBlock(); // depends on control dependency: [if], data = [none]
                prevBlock = currBlock; // depends on control dependency: [if], data = [none]
            } else if (state.isSplitPoint()) {
                currBlock.createSubBlock(); // depends on control dependency: [if], data = [none]
            }
            currBlock.addToSubBlock(state); // depends on control dependency: [for], data = [state]
            if (prev != null) {
                prev.reset(); // depends on control dependency: [if], data = [none]
            }
            prev = state; // depends on control dependency: [for], data = [state]
        }
        if (prev != null) {
            prev.reset(); // depends on control dependency: [if], data = [none]
        }
        finalList.clear();
        // Step 4 of the algorithm is done in the method
        // updateBlocks()
    }
}
public class class_name { private synchronized void reconnect() { XMPPConnection connection = this.weakRefConnection.get(); if (connection == null) { LOGGER.fine("Connection is null, will not reconnect"); return; } // Since there is no thread running, creates a new one to attempt // the reconnection. // avoid to run duplicated reconnectionThread -- fd: 16/09/2010 if (reconnectionThread != null && reconnectionThread.isAlive()) return; reconnectionThread = Async.go(reconnectionRunnable, "Smack Reconnection Manager (" + connection.getConnectionCounter() + ')'); } }
public class class_name {
    /**
     * Starts the asynchronous reconnection attempt, unless the connection
     * has been garbage-collected or a reconnection thread is already alive.
     */
    private synchronized void reconnect() {
        XMPPConnection connection = this.weakRefConnection.get();
        if (connection == null) {
            LOGGER.fine("Connection is null, will not reconnect"); // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        // Since there is no thread running, creates a new one to attempt
        // the reconnection.
        // avoid to run duplicated reconnectionThread -- fd: 16/09/2010
        if (reconnectionThread != null && reconnectionThread.isAlive())
            return;
        reconnectionThread = Async.go(reconnectionRunnable,
                        "Smack Reconnection Manager (" + connection.getConnectionCounter() + ')');
    }
}
public class class_name {
    /**
     * Recursively deletes the directory (or file) at {@code path} and all of
     * its contents. Deletion failures are deliberately ignored (best effort).
     *
     * @param path directory or file to remove; a non-existent path is a no-op
     */
    @SuppressWarnings("ResultOfMethodCallIgnored")
    static private void deleteDirectory(File path) {
        if (!path.exists()) {
            return;
        }
        // listFiles() returns null on I/O error or when path is not a
        // directory — the original dereferenced it unconditionally (NPE).
        File[] children = path.listFiles();
        if (children != null) {
            for (File f : children) {
                if (f.isDirectory()) {
                    deleteDirectory(f);
                }
                f.delete();
            }
        }
        path.delete();
    }
}
public class class_name { @SuppressWarnings("ResultOfMethodCallIgnored") static private void deleteDirectory(File path) { if (path.exists()) { //noinspection ConstantConditions for (File f : path.listFiles()) { if (f.isDirectory()) { deleteDirectory(f); // depends on control dependency: [if], data = [none] } f.delete(); // depends on control dependency: [for], data = [f] } path.delete(); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Grows the backing storage so the word at {@code wordIndex} becomes
     * addressable, advancing the first-empty-word marker accordingly.
     */
    private void expandTo(int wordIndex) {
        final int requiredWords = wordIndex + 1;
        if (firstEmptyWord >= requiredWords) {
            return; // already large enough
        }
        ensureCapacity(requiredWords);
        firstEmptyWord = requiredWords;
    }
}
public class class_name { private void expandTo(int wordIndex) { int wordsRequired = wordIndex + 1; if (firstEmptyWord < wordsRequired) { ensureCapacity(wordsRequired); // depends on control dependency: [if], data = [wordsRequired)] firstEmptyWord = wordsRequired; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public Map<String, JdbcType> retrieveDateColumns(String database, String table) throws SQLException { Map<String, JdbcType> targetDataTypes = ImmutableMap.<String, JdbcType>builder().put("DATE", JdbcType.DATE).put("TIME WITH TIME ZONE", JdbcType.TIME) .put("TIME WITHOUT TIME ZONE", JdbcType.TIME).put("TIMESTAMP WITH TIME ZONE", JdbcType.TIMESTAMP) .put("TIMESTAMP WITHOUT TIME ZONE", JdbcType.TIMESTAMP).build(); ImmutableMap.Builder<String, JdbcType> dateColumnsBuilder = ImmutableMap.builder(); try (PreparedStatement pstmt = this.conn .prepareStatement(INFORMATION_SCHEMA_SELECT_SQL_PSTMT, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) { pstmt.setString(1, database); pstmt.setString(2, table); log.info("Retrieving column type information from SQL: " + pstmt); try (ResultSet rs = pstmt.executeQuery()) { if (!rs.first()) { throw new IllegalArgumentException("No result from information_schema.columns"); } do { String type = rs.getString("data_type").toUpperCase(); JdbcType convertedType = targetDataTypes.get(type); if (convertedType != null) { dateColumnsBuilder.put(rs.getString("column_name"), convertedType); } } while (rs.next()); } } return dateColumnsBuilder.build(); } }
public class class_name { @Override public Map<String, JdbcType> retrieveDateColumns(String database, String table) throws SQLException { Map<String, JdbcType> targetDataTypes = ImmutableMap.<String, JdbcType>builder().put("DATE", JdbcType.DATE).put("TIME WITH TIME ZONE", JdbcType.TIME) .put("TIME WITHOUT TIME ZONE", JdbcType.TIME).put("TIMESTAMP WITH TIME ZONE", JdbcType.TIMESTAMP) .put("TIMESTAMP WITHOUT TIME ZONE", JdbcType.TIMESTAMP).build(); ImmutableMap.Builder<String, JdbcType> dateColumnsBuilder = ImmutableMap.builder(); try (PreparedStatement pstmt = this.conn .prepareStatement(INFORMATION_SCHEMA_SELECT_SQL_PSTMT, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)) { pstmt.setString(1, database); pstmt.setString(2, table); log.info("Retrieving column type information from SQL: " + pstmt); try (ResultSet rs = pstmt.executeQuery()) { if (!rs.first()) { throw new IllegalArgumentException("No result from information_schema.columns"); } do { String type = rs.getString("data_type").toUpperCase(); JdbcType convertedType = targetDataTypes.get(type); if (convertedType != null) { dateColumnsBuilder.put(rs.getString("column_name"), convertedType); // depends on control dependency: [if], data = [none] } } while (rs.next()); } } return dateColumnsBuilder.build(); } }
public class class_name {
    /**
     * Resolves the global SARL output path from the default Xtext output
     * configuration of the SARL language injector.
     *
     * @return the configured output path; never null
     * @throws IllegalStateException when no usable output directory is configured
     */
    public static IPath getGlobalSARLOutputPath() {
        final Injector injector = LangActivator.getInstance().getInjector(LangActivator.IO_SARL_LANG_SARL);
        final IOutputConfigurationProvider configurationProvider = injector.getInstance(IOutputConfigurationProvider.class);
        // Locate the default output configuration among all configured outputs.
        // NOTE(review): Guava's Iterables.find without a default value throws
        // NoSuchElementException when nothing matches, which would make the
        // null check below unreachable -- confirm which find() overload is used.
        final OutputConfiguration config = Iterables.find(
                configurationProvider.getOutputConfigurations(),
                it -> Objects.equals(it.getName(), IFileSystemAccess.DEFAULT_OUTPUT));
        if (config != null) {
            final String path = config.getOutputDirectory();
            if (!Strings.isNullOrEmpty(path)) {
                final IPath pathObject = Path.fromOSString(path);
                if (pathObject != null) {
                    return pathObject;
                }
            }
        }
        throw new IllegalStateException("No global preferences found for SARL."); //$NON-NLS-1$
    }
}
public class class_name { public static IPath getGlobalSARLOutputPath() { final Injector injector = LangActivator.getInstance().getInjector(LangActivator.IO_SARL_LANG_SARL); final IOutputConfigurationProvider configurationProvider = injector.getInstance(IOutputConfigurationProvider.class); final OutputConfiguration config = Iterables.find( configurationProvider.getOutputConfigurations(), it -> Objects.equals(it.getName(), IFileSystemAccess.DEFAULT_OUTPUT)); if (config != null) { final String path = config.getOutputDirectory(); if (!Strings.isNullOrEmpty(path)) { final IPath pathObject = Path.fromOSString(path); if (pathObject != null) { return pathObject; // depends on control dependency: [if], data = [none] } } } throw new IllegalStateException("No global preferences found for SARL."); //$NON-NLS-1$ } }
public class class_name {
    /**
     * Builds a {@link PageResult} from the given entries, applying the
     * optional transformer to each entry. A null transformer means the
     * entries are used as-is; null entries yield an empty page.
     */
    @SuppressWarnings("unchecked")
    public static <E, T> PageResult<T> createPage(final Iterable<? extends E> entries,
            final PageRequest pageRequest, final long totalSize,
            final PageEntryTransformer<T, E> transformer) {
        final PageResult<T> result = new PageResult<>();
        result.setPageRequest(pageRequest == null ? new PageRequestDto() : pageRequest);
        result.setTotalSize(totalSize);
        if (entries == null) {
            return result;
        }
        for (final E source : entries) {
            final T converted = transformer == null ? (T) source : transformer.transform(source);
            result.getEntries().add(converted);
        }
        return result;
    }
}
public class class_name { @SuppressWarnings("unchecked") public static <E, T> PageResult<T> createPage(final Iterable<? extends E> entries, final PageRequest pageRequest, final long totalSize, final PageEntryTransformer<T, E> transformer) { final PageResult<T> page = new PageResult<>(); page.setPageRequest(pageRequest == null ? new PageRequestDto() : pageRequest); page.setTotalSize(totalSize); if (entries != null) { for (E entry : entries) { if (transformer == null) { page.getEntries().add((T) entry); // depends on control dependency: [if], data = [none] } else { T targetEntry = transformer.transform(entry); page.getEntries().add(targetEntry); // depends on control dependency: [if], data = [none] } } } return page; } }
public class class_name {
    /**
     * Copies the given URL to the system clipboard (labelled with the given
     * label on API 11+), using the pre- or post-Honeycomb clipboard API as
     * appropriate, then shows a confirmation toast.
     */
    @SuppressWarnings("deprecation")
    @SuppressLint("NewApi")
    private void addLinkToClipBoard(String url, String label) {
        int sdk = android.os.Build.VERSION.SDK_INT;
        if (sdk < android.os.Build.VERSION_CODES.HONEYCOMB) {
            // Legacy (pre-API 11) clipboard: plain text only, label unused.
            android.text.ClipboardManager clipboard = (android.text.ClipboardManager) context_.getSystemService(Context.CLIPBOARD_SERVICE);
            clipboard.setText(url);
        } else {
            // Modern clipboard: labelled ClipData primary clip.
            android.content.ClipboardManager clipboard = (android.content.ClipboardManager) context_.getSystemService(Context.CLIPBOARD_SERVICE);
            android.content.ClipData clip = android.content.ClipData.newPlainText(label, url);
            clipboard.setPrimaryClip(clip);
        }
        Toast.makeText(context_, builder_.getUrlCopiedMessage(), Toast.LENGTH_SHORT).show();
    }
}
public class class_name { @SuppressWarnings("deprecation") @SuppressLint("NewApi") private void addLinkToClipBoard(String url, String label) { int sdk = android.os.Build.VERSION.SDK_INT; if (sdk < android.os.Build.VERSION_CODES.HONEYCOMB) { android.text.ClipboardManager clipboard = (android.text.ClipboardManager) context_.getSystemService(Context.CLIPBOARD_SERVICE); clipboard.setText(url); // depends on control dependency: [if], data = [none] } else { android.content.ClipboardManager clipboard = (android.content.ClipboardManager) context_.getSystemService(Context.CLIPBOARD_SERVICE); android.content.ClipData clip = android.content.ClipData.newPlainText(label, url); clipboard.setPrimaryClip(clip); // depends on control dependency: [if], data = [none] } Toast.makeText(context_, builder_.getUrlCopiedMessage(), Toast.LENGTH_SHORT).show(); } }
public class class_name {
    /**
     * Marshals a NewBGPPeer into the protocol representation, writing each
     * field through its marshalling binding.
     *
     * @param newBGPPeer         peer to marshal; must not be null
     * @param protocolMarshaller target marshaller
     * @throws SdkClientException when the peer is null or marshalling fails
     */
    public void marshall(NewBGPPeer newBGPPeer, ProtocolMarshaller protocolMarshaller) {
        if (newBGPPeer == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(newBGPPeer.getAsn(), ASN_BINDING);
            protocolMarshaller.marshall(newBGPPeer.getAuthKey(), AUTHKEY_BINDING);
            protocolMarshaller.marshall(newBGPPeer.getAddressFamily(), ADDRESSFAMILY_BINDING);
            protocolMarshaller.marshall(newBGPPeer.getAmazonAddress(), AMAZONADDRESS_BINDING);
            protocolMarshaller.marshall(newBGPPeer.getCustomerAddress(), CUSTOMERADDRESS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name { public void marshall(NewBGPPeer newBGPPeer, ProtocolMarshaller protocolMarshaller) { if (newBGPPeer == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(newBGPPeer.getAsn(), ASN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(newBGPPeer.getAuthKey(), AUTHKEY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(newBGPPeer.getAddressFamily(), ADDRESSFAMILY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(newBGPPeer.getAmazonAddress(), AMAZONADDRESS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(newBGPPeer.getCustomerAddress(), CUSTOMERADDRESS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Parses a comma-separated list of encoded property criteria of the form
     * {@code name|value[|OPERATOR]} and adds each parsed criterion to the
     * given set. Tokens with fewer than two fields are skipped; the operator
     * defaults to HAS when absent.
     */
    public static void decodeProperties(Set<PropertyCriteria> properties, String encoded) {
        if (encoded == null || encoded.trim().isEmpty()) {
            return;
        }
        for (StringTokenizer tokens = new StringTokenizer(encoded, ","); tokens.hasMoreTokens();) {
            String[] fields = tokens.nextToken().split("[|]");
            if (fields.length < 2) {
                continue; // malformed token: too few fields
            }
            String propName = fields[0].trim();
            String propValue = fields[1].trim();
            Operator operator = fields.length > 2 ? Operator.valueOf(fields[2].trim()) : Operator.HAS;
            log.tracef("Extracted property name [%s] value [%s] operator [%s]", propName, propValue, operator);
            properties.add(new PropertyCriteria(propName, propValue, operator));
        }
    }
}
public class class_name { public static void decodeProperties(Set<PropertyCriteria> properties, String encoded) { if (encoded != null && !encoded.trim().isEmpty()) { StringTokenizer st = new StringTokenizer(encoded, ","); while (st.hasMoreTokens()) { String token = st.nextToken(); String[] parts = token.split("[|]"); if (parts.length >= 2) { String name = parts[0].trim(); String value = parts[1].trim(); Operator op = Operator.HAS; if (parts.length > 2) { op = Operator.valueOf(parts[2].trim()); // depends on control dependency: [if], data = [none] } log.tracef("Extracted property name [%s] value [%s] operator [%s]", name, value, op); // depends on control dependency: [if], data = [none] properties.add(new PropertyCriteria(name, value, op)); // depends on control dependency: [if], data = [none] } } } } }
public class class_name {
    /**
     * Appends the given tag options to this result, lazily creating the
     * backing list on first use.
     *
     * @return this result, for call chaining
     */
    public DescribeProductAsAdminResult withTagOptions(TagOptionDetail... tagOptions) {
        if (this.tagOptions == null) {
            setTagOptions(new java.util.ArrayList<TagOptionDetail>(tagOptions.length));
        }
        java.util.Collections.addAll(this.tagOptions, tagOptions);
        return this;
    }
}
public class class_name { public DescribeProductAsAdminResult withTagOptions(TagOptionDetail... tagOptions) { if (this.tagOptions == null) { setTagOptions(new java.util.ArrayList<TagOptionDetail>(tagOptions.length)); // depends on control dependency: [if], data = [none] } for (TagOptionDetail ele : tagOptions) { this.tagOptions.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name {
    /**
     * Builds an IN expression over the given dates, rendering each date as
     * {@code 'yyyy...Z'} inside a parenthesized, comma-separated list.
     *
     * <p>Fixes: the original only emitted the opening parenthesis together
     * with the first element, so an empty array produced the malformed
     * literal {@code ")"}; it also used a boxed {@code Boolean} flag and
     * O(n^2) {@code String.concat} chains.
     *
     * @param value dates to include; an empty array yields {@code ()}
     */
    public Expression<java.util.Date> in(java.util.Date[] value) {
        SimpleDateFormat formatter = getDateTimeFormatter();
        StringBuilder literal = new StringBuilder("(");
        for (int i = 0; i < value.length; i++) {
            if (i > 0) {
                literal.append(", ");
            }
            literal.append('\'').append(formatter.format(value[i])).append("Z'");
        }
        literal.append(')');
        return new Expression<java.util.Date>(this, Operation.in, literal.toString());
    }
}
public class class_name { public Expression<java.util.Date> in(java.util.Date[] value) { SimpleDateFormat formatter = getDateTimeFormatter(); String valueString = ""; Boolean firstCalendar = true; for (Date v : value) { if (firstCalendar) { valueString = valueString.concat("('").concat(formatter.format(v).concat("Z")).concat("'"); firstCalendar = false; // depends on control dependency: [if], data = [none] } else { valueString = valueString.concat(", '").concat(formatter.format(v).concat("Z")).concat("'"); // depends on control dependency: [if], data = [none] } } valueString = valueString.concat(")"); return new Expression<java.util.Date>(this, Operation.in, valueString); } }
public class class_name {
    /**
     * Serializes the current sequence-vectors model into a timestamped
     * {@code <prefix>_<timestamp>.seqvec} file under the target folder,
     * guarded by the semaphore so only one dump runs at a time.
     *
     * <p>Fixes: the original released the semaphore in {@code finally} even
     * when {@code acquire()} itself was interrupted (leaking a permit), and
     * swallowed the interrupt without restoring the thread's interrupt flag.
     */
    @Override
    public void processEvent(ListenerEvent event, SequenceVectors<T> sequenceVectors, long argument) {
        try {
            locker.acquire();
        } catch (InterruptedException e) {
            // We never acquired the permit, so we must not release it;
            // restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            return;
        }
        try {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");
            StringBuilder builder = new StringBuilder(targetFolder.getAbsolutePath());
            builder.append("/").append(modelPrefix).append("_").append(sdf.format(new Date())).append(".seqvec");
            File targetFile = new File(builder.toString());
            if (useBinarySerialization) {
                SerializationUtils.saveObject(sequenceVectors, targetFile);
            } else {
                // Non-binary serialization is not supported yet; the exception
                // is caught below and reported, matching original behavior.
                throw new UnsupportedOperationException("Not implemented yet");
            }
        } catch (Exception e) {
            // NOTE(review): no logger is visible in this block; keeping
            // printStackTrace to preserve behavior -- consider a logger.
            e.printStackTrace();
        } finally {
            locker.release();
        }
    }
}
public class class_name { @Override public void processEvent(ListenerEvent event, SequenceVectors<T> sequenceVectors, long argument) { try { locker.acquire(); // depends on control dependency: [try], data = [none] SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder builder = new StringBuilder(targetFolder.getAbsolutePath()); builder.append("/").append(modelPrefix).append("_").append(sdf.format(new Date())).append(".seqvec"); // depends on control dependency: [try], data = [none] File targetFile = new File(builder.toString()); if (useBinarySerialization) { SerializationUtils.saveObject(sequenceVectors, targetFile); // depends on control dependency: [if], data = [none] } else { throw new UnsupportedOperationException("Not implemented yet"); } } catch (Exception e) { e.printStackTrace(); } finally { // depends on control dependency: [catch], data = [none] locker.release(); } } }
public class class_name {
    /**
     * Renders each argument on its own line, prefixing entries with
     * "args &lt;index&gt;" (1-based) when there is more than one argument.
     */
    private StringBuffer getArgsString(Object[] args) {
        final StringBuffer out = new StringBuffer();
        final String prefix = "args ";
        int index = 0;
        for (final Object arg : args) {
            index++;
            if (args.length > 1) {
                out.append(prefix + index);
            }
            out.append("\r\t").append(getResultString(arg)).append("\n");
        }
        return out;
    }
}
public class class_name { private StringBuffer getArgsString(Object[] args) { StringBuffer buffer = new StringBuffer(); String prefix = "args "; for (int i = 0; i < args.length; i++) { if (args.length > 1) { buffer.append(prefix + (i + 1)); // depends on control dependency: [if], data = [1)] } buffer.append("\r\t"); // depends on control dependency: [for], data = [none] buffer.append(getResultString(args[i])); // depends on control dependency: [for], data = [i] buffer.append("\n"); // depends on control dependency: [for], data = [none] } return buffer; } }
public class class_name { public static M2MEntity extractEntityManagedByDAO(TypeElement daoElement) { ClassName entity1 = null; ClassName entity2 = null; String prefixId = null; String tableName = null; String entityName = null; PackageElement pkg = null; String packageName = null; boolean needToCreate = true; boolean generatedMethods=true; boolean immutable=true; if (daoElement.getAnnotation(BindDaoMany2Many.class) != null) { entity1 = TypeUtility.className(AnnotationUtility.extractAsClassName(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.ENTITY_1)); entity2 = TypeUtility.className(AnnotationUtility.extractAsClassName(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.ENTITY_2)); prefixId = AnnotationUtility.extractAsString(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.ID_NAME); tableName = AnnotationUtility.extractAsString(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.TABLE_NAME); tableName = AnnotationUtility.extractAsString(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.TABLE_NAME); immutable = AnnotationUtility.extractAsBoolean(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.IMMUTABLE); generatedMethods=AnnotationUtility.extractAsBoolean(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.METHODS); entityName = entity1.simpleName() + entity2.simpleName(); pkg = BaseProcessor.elementUtils.getPackageOf(daoElement); packageName = pkg.isUnnamed() ? 
null : pkg.getQualifiedName().toString(); } if (daoElement.getAnnotation(BindDao.class) != null) { // we have @BindDao String derived = AnnotationUtility.extractAsClassName(daoElement, BindDao.class, AnnotationAttributeType.VALUE); ClassName clazz = TypeUtility.className(derived); packageName = clazz.packageName(); entityName = clazz.simpleName(); String tableTemp = AnnotationUtility.extractAsClassName(daoElement, BindDao.class, AnnotationAttributeType.TABLE_NAME); if (StringUtils.hasText(tableTemp)) { tableName = tableTemp; } needToCreate = false; } M2MEntity entity = new M2MEntity(daoElement, packageName, entityName, TypeUtility.className(daoElement.asType().toString()), entity1, entity2, prefixId, tableName, needToCreate, generatedMethods, immutable); return entity; } }
public class class_name { public static M2MEntity extractEntityManagedByDAO(TypeElement daoElement) { ClassName entity1 = null; ClassName entity2 = null; String prefixId = null; String tableName = null; String entityName = null; PackageElement pkg = null; String packageName = null; boolean needToCreate = true; boolean generatedMethods=true; boolean immutable=true; if (daoElement.getAnnotation(BindDaoMany2Many.class) != null) { entity1 = TypeUtility.className(AnnotationUtility.extractAsClassName(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.ENTITY_1)); // depends on control dependency: [if], data = [none] entity2 = TypeUtility.className(AnnotationUtility.extractAsClassName(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.ENTITY_2)); // depends on control dependency: [if], data = [none] prefixId = AnnotationUtility.extractAsString(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.ID_NAME); // depends on control dependency: [if], data = [none] tableName = AnnotationUtility.extractAsString(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.TABLE_NAME); // depends on control dependency: [if], data = [none] tableName = AnnotationUtility.extractAsString(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.TABLE_NAME); // depends on control dependency: [if], data = [none] immutable = AnnotationUtility.extractAsBoolean(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.IMMUTABLE); // depends on control dependency: [if], data = [none] generatedMethods=AnnotationUtility.extractAsBoolean(daoElement, BindDaoMany2Many.class, AnnotationAttributeType.METHODS); // depends on control dependency: [if], data = [none] entityName = entity1.simpleName() + entity2.simpleName(); // depends on control dependency: [if], data = [none] pkg = BaseProcessor.elementUtils.getPackageOf(daoElement); // depends on control dependency: [if], data = [none] packageName = pkg.isUnnamed() ? 
null : pkg.getQualifiedName().toString(); // depends on control dependency: [if], data = [none] } if (daoElement.getAnnotation(BindDao.class) != null) { // we have @BindDao String derived = AnnotationUtility.extractAsClassName(daoElement, BindDao.class, AnnotationAttributeType.VALUE); ClassName clazz = TypeUtility.className(derived); packageName = clazz.packageName(); // depends on control dependency: [if], data = [none] entityName = clazz.simpleName(); // depends on control dependency: [if], data = [none] String tableTemp = AnnotationUtility.extractAsClassName(daoElement, BindDao.class, AnnotationAttributeType.TABLE_NAME); if (StringUtils.hasText(tableTemp)) { tableName = tableTemp; // depends on control dependency: [if], data = [none] } needToCreate = false; // depends on control dependency: [if], data = [none] } M2MEntity entity = new M2MEntity(daoElement, packageName, entityName, TypeUtility.className(daoElement.asType().toString()), entity1, entity2, prefixId, tableName, needToCreate, generatedMethods, immutable); return entity; } }
public class class_name { @Override public <N extends SpatialComparable> List<List<N>> partition(List<N> spatialObjects, int minEntries, int maxEntries) { List<List<N>> partitions = new ArrayList<>(); List<N> objects = new ArrayList<>(spatialObjects); while (!objects.isEmpty()) { StringBuilder msg = new StringBuilder(); // get the split axis and split point int splitAxis = chooseMaximalExtendedSplitAxis(objects); int splitPoint = chooseBulkSplitPoint(objects.size(), minEntries, maxEntries); if (LOG.isDebugging()) { msg.append("\nsplitAxis ").append(splitAxis); msg.append("\nsplitPoint ").append(splitPoint); } // sort in the right dimension Collections.sort(objects, new SpatialSingleMinComparator(splitAxis)); // insert into partition List<N> partition1 = new ArrayList<>(); for (int i = 0; i < splitPoint; i++) { N o = objects.remove(0); partition1.add(o); } partitions.add(partition1); // copy array if (LOG.isDebugging()) { msg.append("\ncurrent partition ").append(partition1); msg.append("\nremaining objects # ").append(objects.size()); LOG.debugFine(msg.toString()); } } if (LOG.isDebugging()) { LOG.debugFine("partitions " + partitions); } return partitions; } }
public class class_name { @Override public <N extends SpatialComparable> List<List<N>> partition(List<N> spatialObjects, int minEntries, int maxEntries) { List<List<N>> partitions = new ArrayList<>(); List<N> objects = new ArrayList<>(spatialObjects); while (!objects.isEmpty()) { StringBuilder msg = new StringBuilder(); // get the split axis and split point int splitAxis = chooseMaximalExtendedSplitAxis(objects); int splitPoint = chooseBulkSplitPoint(objects.size(), minEntries, maxEntries); if (LOG.isDebugging()) { msg.append("\nsplitAxis ").append(splitAxis); // depends on control dependency: [if], data = [none] msg.append("\nsplitPoint ").append(splitPoint); // depends on control dependency: [if], data = [none] } // sort in the right dimension Collections.sort(objects, new SpatialSingleMinComparator(splitAxis)); // depends on control dependency: [while], data = [none] // insert into partition List<N> partition1 = new ArrayList<>(); for (int i = 0; i < splitPoint; i++) { N o = objects.remove(0); partition1.add(o); // depends on control dependency: [for], data = [none] } partitions.add(partition1); // depends on control dependency: [while], data = [none] // copy array if (LOG.isDebugging()) { msg.append("\ncurrent partition ").append(partition1); // depends on control dependency: [if], data = [none] msg.append("\nremaining objects # ").append(objects.size()); // depends on control dependency: [if], data = [none] LOG.debugFine(msg.toString()); // depends on control dependency: [if], data = [none] } } if (LOG.isDebugging()) { LOG.debugFine("partitions " + partitions); // depends on control dependency: [if], data = [none] } return partitions; } }
public class class_name {
    /**
     * Reads a Schematron {@code dir} element from micro-XML: the value
     * attribute becomes the direction value, all other attributes are kept as
     * foreign attributes, text children are collected, non-Schematron element
     * children are kept as foreign elements, and comments are ignored.
     *
     * @param eDir the XML element to read; must not be null
     * @return the populated PSDir; never null
     */
    @Nonnull
    public PSDir readDirFromXML (@Nonnull final IMicroElement eDir)
    {
        final PSDir ret = new PSDir ();
        eDir.forAllAttributes ( (sNS, sAttrName, sVal) -> {
            final String sAttrValue = _getAttributeValue (sVal);
            if (sAttrName.equals (CSchematronXML.ATTR_VALUE))
                ret.setValue (EDirValue.getFromIDOrNull (sAttrValue));
            else
                // Unknown attributes are preserved verbatim.
                ret.addForeignAttribute (sAttrName, sAttrValue);
        });
        eDir.forAllChildren (aDirChild -> {
            switch (aDirChild.getType ())
            {
                case TEXT:
                    ret.addText (((IMicroText) aDirChild).getNodeValue ());
                    break;
                case ELEMENT:
                    final IMicroElement eElement = (IMicroElement) aDirChild;
                    if (CSchematron.NAMESPACE_SCHEMATRON.equals (eElement.getNamespaceURI ()))
                    {
                        // Schematron itself defines no children for dir.
                        _warn (ret, "Unsupported Schematron element '" + eElement.getLocalName () + "'");
                    }
                    else
                        // Foreign-namespace elements are kept (cloned).
                        ret.addForeignElement (eElement.getClone ());
                    break;
                case COMMENT:
                    // Ignore comments
                    break;
                default:
                    _warn (ret, "Unsupported child node: " + aDirChild);
            }
        });
        return ret;
    }
}
public class class_name { @Nonnull public PSDir readDirFromXML (@Nonnull final IMicroElement eDir) { final PSDir ret = new PSDir (); eDir.forAllAttributes ( (sNS, sAttrName, sVal) -> { final String sAttrValue = _getAttributeValue (sVal); if (sAttrName.equals (CSchematronXML.ATTR_VALUE)) ret.setValue (EDirValue.getFromIDOrNull (sAttrValue)); else ret.addForeignAttribute (sAttrName, sAttrValue); }); eDir.forAllChildren (aDirChild -> { switch (aDirChild.getType ()) { case TEXT: ret.addText (((IMicroText) aDirChild).getNodeValue ()); break; case ELEMENT: final IMicroElement eElement = (IMicroElement) aDirChild; if (CSchematron.NAMESPACE_SCHEMATRON.equals (eElement.getNamespaceURI ())) { _warn (ret, "Unsupported Schematron element '" + eElement.getLocalName () + "'"); // depends on control dependency: [if], data = [none] } else ret.addForeignElement (eElement.getClone ()); break; case COMMENT: // Ignore comments break; default: _warn (ret, "Unsupported child node: " + aDirChild); } }); return ret; } }
public class class_name {
    /**
     * Resizes the linear-system matrices for the given calibration
     * observations: two rows per observed point.
     */
    private void init( List<CalibrationObservation> observations ) {
        int totalPoints = 0;
        for (CalibrationObservation observation : observations) {
            totalPoints += observation.size();
        }
        A.reshape(2 * totalPoints, X.numRows, false);
        B.reshape(A.numRows, 1, false);
    }
}
public class class_name { private void init( List<CalibrationObservation> observations ) { int totalPoints = 0; for (int i = 0; i < observations.size(); i++) { totalPoints += observations.get(i).size(); // depends on control dependency: [for], data = [i] } A.reshape(2*totalPoints,X.numRows,false); B.reshape(A.numRows,1,false); } }
public class class_name {
    /**
     * Returns the value of the given property in the given INI section as a
     * Long, or null when the section or property is missing, the value is
     * null, or the value is not a parseable long (original behavior kept).
     *
     * <p>Fixes: replaces the deprecated {@code new Long(String)} with
     * {@code Long.valueOf}, and removes the dead finally-block nulling and
     * deeply nested control flow.
     *
     * @param pstrSection INI section name
     * @param pstrProp    property name within the section
     */
    public Long getLongProperty(String pstrSection, String pstrProp) {
        INISection objSec = (INISection) this.mhmapSections.get(pstrSection);
        if (objSec == null) {
            return null;
        }
        INIProperty objProp = objSec.getProperty(pstrProp);
        if (objProp == null) {
            return null;
        }
        String strVal = objProp.getPropValue();
        if (strVal == null) {
            return null;
        }
        try {
            return Long.valueOf(strVal);
        } catch (NumberFormatException ignored) {
            // Malformed numbers deliberately map to null, as before.
            return null;
        }
    }
}
public class class_name { public Long getLongProperty(String pstrSection, String pstrProp) { Long lngRet = null; String strVal = null; INIProperty objProp = null; INISection objSec = null; objSec = (INISection) this.mhmapSections.get(pstrSection); if (objSec != null) { objProp = objSec.getProperty(pstrProp); // depends on control dependency: [if], data = [none] try { if (objProp != null) { strVal = objProp.getPropValue(); // depends on control dependency: [if], data = [none] if (strVal != null) lngRet = new Long(strVal); } } catch (NumberFormatException NFExIgnore) { } // depends on control dependency: [catch], data = [none] finally { if (objProp != null) objProp = null; } objSec = null; // depends on control dependency: [if], data = [none] } return lngRet; } }
public class class_name { public Map<String, Set<Privilege>> ymlToPrivileges(String yml) { try { Map<String, Set<Privilege>> map = mapper.readValue(yml, new TypeReference<TreeMap<String, TreeSet<Privilege>>>() { }); map.forEach((msName, privileges) -> privileges.forEach(privilege -> privilege.setMsName(msName))); return Collections.unmodifiableMap(map); } catch (Exception e) { log.error("Failed to create privileges collection from YML file, error: {}", e.getMessage(), e); } return Collections.emptyMap(); } }
public class class_name { public Map<String, Set<Privilege>> ymlToPrivileges(String yml) { try { Map<String, Set<Privilege>> map = mapper.readValue(yml, new TypeReference<TreeMap<String, TreeSet<Privilege>>>() { }); map.forEach((msName, privileges) -> privileges.forEach(privilege -> privilege.setMsName(msName))); // depends on control dependency: [try], data = [none] return Collections.unmodifiableMap(map); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.error("Failed to create privileges collection from YML file, error: {}", e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return Collections.emptyMap(); } }
public class class_name {
    /**
     * Builds a default-argument array for the given parameter types:
     * primitive types get their zero value, reference types stay null.
     *
     * <p>Fixes: the explicit {@code result[i] = null} for reference types was
     * redundant (Object arrays are null-initialized) and has been removed.
     */
    private static Object[] getInitParameters(Class<?>[] parameterTypes) {
        final Object[] result = new Object[parameterTypes.length];
        for (int i = 0; i < parameterTypes.length; i++) {
            if (parameterTypes[i].isPrimitive()) {
                result[i] = ClassUtil.getPrimitiveDefaultValue(parameterTypes[i]);
            }
            // Reference types keep the array's default null.
        }
        return result;
    }
}
public class class_name { private static Object[] getInitParameters(Class<?>[] parameterTypes) { int length = parameterTypes.length; Object[] result = new Object[length]; for (int i = 0; i < length; i++) { if (parameterTypes[i].isPrimitive()) { Object init = ClassUtil.getPrimitiveDefaultValue(parameterTypes[i]); result[i] = init; // depends on control dependency: [if], data = [none] continue; } result[i] = null; // depends on control dependency: [for], data = [i] } return result; } }
public class class_name {
    /**
     * Looks up a trader by name among the known traders, falling back to the
     * mock persister when no match is found.
     */
    @AsParameterConverter
    public Trader retrieveTrader(String name) {
        Trader match = null;
        for (Trader candidate : traders) {
            if (candidate.getName().equals(name)) {
                match = candidate;
                break;
            }
        }
        return match != null ? match : mockTradePersister().retrieveTrader(name);
    }
}
public class class_name { @AsParameterConverter public Trader retrieveTrader(String name) { for (Trader trader : traders) { if (trader.getName().equals(name)) { return trader; // depends on control dependency: [if], data = [none] } } return mockTradePersister().retrieveTrader(name); } }
public class class_name {
    /**
     * Handles the sign-in form POST: validates credentials, loads the user,
     * creates a Redis-backed login session, and returns the current visitor.
     */
    @NoAuth
    @RequestMapping(value = "/signin", method = RequestMethod.POST)
    @ResponseBody
    public JsonObjectBase signin(@Valid SigninForm signin, HttpServletRequest request) {
        LOG.info(signin.toString());
        // Validate the login credentials.
        authValidator.validateLogin(signin);
        // Database login: load the user record by name.
        User user = signMgr.signin(signin.getName());
        // Session expiry time; "remember me" (remember == 1) extends it.
        // NOTE(review): getRemember().equals(1) throws NPE when remember is
        // null -- confirm the form guarantees a non-null value.
        int expireTime = LoginConstant.SESSION_EXPIRE_TIME;
        if (signin.getRemember().equals(1)) {
            expireTime = LoginConstant.SESSION_EXPIRE_TIME2;
        }
        // Redis login: persist the session with the chosen expiry.
        redisLogin.login(request, user, expireTime);
        VisitorVo visitorVo = userMgr.getCurVisitor();
        return buildSuccess("visitor", visitorVo);
    }
}
public class class_name { @NoAuth @RequestMapping(value = "/signin", method = RequestMethod.POST) @ResponseBody public JsonObjectBase signin(@Valid SigninForm signin, HttpServletRequest request) { LOG.info(signin.toString()); // 验证 authValidator.validateLogin(signin); // 数据库登录 User user = signMgr.signin(signin.getName()); // 过期时间 int expireTime = LoginConstant.SESSION_EXPIRE_TIME; if (signin.getRemember().equals(1)) { expireTime = LoginConstant.SESSION_EXPIRE_TIME2; // depends on control dependency: [if], data = [none] } // redis login redisLogin.login(request, user, expireTime); VisitorVo visitorVo = userMgr.getCurVisitor(); return buildSuccess("visitor", visitorVo); } }
public class class_name {
    /**
     * Attaches the recorded screencast (as a path relative to the report root)
     * to the test method report, if a video was produced.
     */
    public void reportScreencastRecording(@Observes AfterVideoRecorded event, ReporterConfiguration reporterConfiguration) {
        Path videoLocation = event.getVideoLocation();
        if (videoLocation != null) {
            // The recorder produces an .flv file that is transcoded to .mp4.
            // NOTE(review): String.replace substitutes EVERY "flv" occurrence
            // in the whole path (e.g. a directory named "flv"), not just the
            // extension -- confirm paths never contain "flv" elsewhere.
            videoLocation = Paths.get(videoLocation.toString().replace("flv", "mp4"));
            // Report the video path relative to the report root directory.
            final Path rootDir = Paths.get(reporterConfiguration.getRootDirectory());
            final Path relativize = rootDir.relativize(videoLocation);
            final Method testMethod = getTestMethod(event);
            Reporter.createReport(new TestMethodReport(testMethod.getName()))
                .addKeyValueEntry(DockerEnvironmentReportKey.VIDEO_PATH, new FileEntry(relativize))
                .inSection(new TestMethodSection(testMethod))
                .fire(reportEvent);
        }
    }
}
public class class_name { public void reportScreencastRecording(@Observes AfterVideoRecorded event, ReporterConfiguration reporterConfiguration) { Path videoLocation = event.getVideoLocation(); if (videoLocation != null) { videoLocation = Paths.get(videoLocation.toString().replace("flv", "mp4")); // depends on control dependency: [if], data = [(videoLocation] final Path rootDir = Paths.get(reporterConfiguration.getRootDirectory()); final Path relativize = rootDir.relativize(videoLocation); final Method testMethod = getTestMethod(event); Reporter.createReport(new TestMethodReport(testMethod.getName())) .addKeyValueEntry(DockerEnvironmentReportKey.VIDEO_PATH, new FileEntry(relativize)) .inSection(new TestMethodSection(testMethod)) .fire(reportEvent); // depends on control dependency: [if], data = [none] } } }
public class class_name { public Character toCharacter(final Object value, final Character defaultValue) { final Character result = toCharacter(value); if (result == null) { return defaultValue; } return result; } }
public class class_name { public Character toCharacter(final Object value, final Character defaultValue) { final Character result = toCharacter(value); if (result == null) { return defaultValue; // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { @SafeVarargs public static <M extends AbstractModule> Injector injector(Class<M>... modules) { List<String> names = new ArrayList<String>(modules.length); for (Class<?> klass : modules) { names.add(klass.getName()); } return ngo.injector(JSArray.create(names.toArray(EMPTY_STRING_ARRAY))); } }
public class class_name { @SafeVarargs public static <M extends AbstractModule> Injector injector(Class<M>... modules) { List<String> names = new ArrayList<String>(modules.length); for (Class<?> klass : modules) { names.add(klass.getName()); // depends on control dependency: [for], data = [klass] } return ngo.injector(JSArray.create(names.toArray(EMPTY_STRING_ARRAY))); } }
public class class_name { @SuppressWarnings("squid:S3752") // multiple methods required @RequestMapping(method = {RequestMethod.GET, RequestMethod.POST}) public String forwardDefaultMenuDefaultPlugin(Model model) { Menu menu = menuReaderService .getMenu() .orElseThrow(() -> new RuntimeException("main menu does not exist")); String menuId = menu.getId(); model.addAttribute(KEY_MENU_ID, menuId); Optional<MenuItem> optionalActiveItem = menu.firstItem(); if (!optionalActiveItem.isPresent()) { LOG.warn("main menu does not contain any (accessible) items"); return "forward:/login"; } MenuItem activeItem = optionalActiveItem.get(); String pluginId = activeItem.getId(); String contextUri = URI + '/' + menuId + '/' + pluginId; addModelAttributes(model, contextUri); return getForwardPluginUri(activeItem.getId(), null, getQueryString(activeItem)); } }
public class class_name { @SuppressWarnings("squid:S3752") // multiple methods required @RequestMapping(method = {RequestMethod.GET, RequestMethod.POST}) public String forwardDefaultMenuDefaultPlugin(Model model) { Menu menu = menuReaderService .getMenu() .orElseThrow(() -> new RuntimeException("main menu does not exist")); String menuId = menu.getId(); model.addAttribute(KEY_MENU_ID, menuId); Optional<MenuItem> optionalActiveItem = menu.firstItem(); if (!optionalActiveItem.isPresent()) { LOG.warn("main menu does not contain any (accessible) items"); // depends on control dependency: [if], data = [none] return "forward:/login"; // depends on control dependency: [if], data = [none] } MenuItem activeItem = optionalActiveItem.get(); String pluginId = activeItem.getId(); String contextUri = URI + '/' + menuId + '/' + pluginId; addModelAttributes(model, contextUri); return getForwardPluginUri(activeItem.getId(), null, getQueryString(activeItem)); } }
public class class_name { public void emitItem(Object n) { Level lvl = currentLevel(); switch(lvl.status) { case seq: { Level parent = parentLevel(); if(parent.status == LevelStatus.mapx && lvl.ncount == 0) { if(parent.ncount % 2 == 0 && lvl.anctag == 0) { lvl.spaces = parent.spaces; } } else if(lvl.anctag == 0 && parent.status == LevelStatus.seq && lvl.ncount == 0) { int spcs = (lvl.spaces - parent.spaces ) - 2; if(spcs >= 0) { for(int i = 0; i < spcs; i++) { write(SPACE, 1); } write(DASH_SPACE, 2); break; } } emitIndent(); write(DASH_SPACE, 2); break; } case iseq: if(lvl.ncount > 0) { write(COMMA_SPACE, 2); } break; case map: { Level parent = parentLevel(); if(lvl.anctag == 0 && parent.status == LevelStatus.seq && lvl.ncount == 0) { int spcs = (lvl.spaces - parent.spaces) - 2; if(spcs >= 0) { for(int i = 0; i < spcs; i++) { write(SPACE, 1); } break; } } if(lvl.ncount % 2 == 0) { emitIndent(); } else { write(COLON_SPACE, 2); } break; } case mapx: { if(lvl.ncount % 2 == 0) { emitIndent(); lvl.status = LevelStatus.map; } else { if(lvl.spaces > 0) { byte[] spcs = new byte[lvl.spaces]; java.util.Arrays.fill(spcs, (byte)' '); write(Pointer.create(spcs, 0), lvl.spaces); } write(COLON_SPACE, 2); } break; } case imap: { if(lvl.ncount > 0) { if(lvl.ncount % 2 == 0) { write(COMMA_SPACE, 2); } else { write(COLON_SPACE, 2); } } break; } default: break; } lvl.ncount++; emit(n); } }
public class class_name { public void emitItem(Object n) { Level lvl = currentLevel(); switch(lvl.status) { case seq: { Level parent = parentLevel(); if(parent.status == LevelStatus.mapx && lvl.ncount == 0) { if(parent.ncount % 2 == 0 && lvl.anctag == 0) { lvl.spaces = parent.spaces; // depends on control dependency: [if], data = [none] } } else if(lvl.anctag == 0 && parent.status == LevelStatus.seq && lvl.ncount == 0) { int spcs = (lvl.spaces - parent.spaces ) - 2; if(spcs >= 0) { for(int i = 0; i < spcs; i++) { write(SPACE, 1); // depends on control dependency: [for], data = [none] } write(DASH_SPACE, 2); // depends on control dependency: [if], data = [none] break; } } emitIndent(); write(DASH_SPACE, 2); break; } case iseq: if(lvl.ncount > 0) { write(COMMA_SPACE, 2); // depends on control dependency: [if], data = [none] } break; case map: { Level parent = parentLevel(); if(lvl.anctag == 0 && parent.status == LevelStatus.seq && lvl.ncount == 0) { int spcs = (lvl.spaces - parent.spaces) - 2; if(spcs >= 0) { for(int i = 0; i < spcs; i++) { write(SPACE, 1); // depends on control dependency: [for], data = [none] } break; } } if(lvl.ncount % 2 == 0) { emitIndent(); // depends on control dependency: [if], data = [none] } else { write(COLON_SPACE, 2); // depends on control dependency: [if], data = [none] } break; } case mapx: { if(lvl.ncount % 2 == 0) { emitIndent(); lvl.status = LevelStatus.map; } else { if(lvl.spaces > 0) { byte[] spcs = new byte[lvl.spaces]; java.util.Arrays.fill(spcs, (byte)' '); write(Pointer.create(spcs, 0), lvl.spaces); } write(COLON_SPACE, 2); } break; } case imap: { if(lvl.ncount > 0) { if(lvl.ncount % 2 == 0) { write(COMMA_SPACE, 2); } else { write(COLON_SPACE, 2); } } break; } default: break; } lvl.ncount++; emit(n); } }
public class class_name { @SuppressWarnings("unchecked") public void postValidate(Object object) throws DataValidationException { if (_values != null || _ranges != null) { if (_values != null) for (Object value: _values) { if (value.equals(object)) return; } if (_ranges != null) for (@SuppressWarnings("rawtypes") Range r: _ranges) { @SuppressWarnings("rawtypes") Comparable o = (Comparable)object; if (r.inclusive) { if ((r.min == null || r.min.compareTo(o) <= 0) && (r.max == null || o.compareTo(r.max) <= 0)) { return; } } else { if ((r.min == null || r.min.compareTo(o) < 0) && (r.max == null || o.compareTo(r.max) < 0)) { return; } } } throw new DataValidationException("VALUES/RANGES", _name, object); } } }
public class class_name { @SuppressWarnings("unchecked") public void postValidate(Object object) throws DataValidationException { if (_values != null || _ranges != null) { if (_values != null) for (Object value: _values) { if (value.equals(object)) return; } if (_ranges != null) for (@SuppressWarnings("rawtypes") Range r: _ranges) { @SuppressWarnings("rawtypes") Comparable o = (Comparable)object; if (r.inclusive) { if ((r.min == null || r.min.compareTo(o) <= 0) && (r.max == null || o.compareTo(r.max) <= 0)) { return; // depends on control dependency: [if], data = [none] } } else { if ((r.min == null || r.min.compareTo(o) < 0) && (r.max == null || o.compareTo(r.max) < 0)) { return; // depends on control dependency: [if], data = [none] } } } throw new DataValidationException("VALUES/RANGES", _name, object); } } }
public class class_name { public static synchronized UserGroupInformation login(String principal, String keytab) { // resolve the requested principal, if it is present String finalPrincipal = null; if (principal != null && !principal.isEmpty()) { try { // resolves _HOST pattern using standard Hadoop search/replace // via DNS lookup when 2nd argument is empty finalPrincipal = SecurityUtil.getServerPrincipal(principal, ""); } catch (IOException e) { throw new SecurityException( "Failed to resolve Kerberos principal", e); } } // check if there is a user already logged in UserGroupInformation currentUser = null; try { currentUser = UserGroupInformation.getLoginUser(); } catch (IOException e) { // not a big deal but this shouldn't typically happen because it will // generally fall back to the UNIX user LOG.debug("Unable to get login user before Kerberos auth attempt", e); } // if the current user is valid (matches the given principal) then use it if (currentUser != null) { if (finalPrincipal == null || finalPrincipal.equals(currentUser.getUserName())) { LOG.debug("Using existing login for {}: {}", finalPrincipal, currentUser); return currentUser; } else { // be cruel and unusual when user tries to login as multiple principals // this isn't really valid with a reconfigure but this should be rare // enough to warrant a restart of the agent JVM // TODO: find a way to interrogate the entire current config state, // since we don't have to be unnecessarily protective if they switch all // HDFS sinks to use a different principal all at once. 
throw new SecurityException( "Cannot use multiple Kerberos principals: " + finalPrincipal + " would replace " + currentUser.getUserName()); } } // prepare for a new login Preconditions.checkArgument(principal != null && !principal.isEmpty(), "Invalid Kerberos principal: " + String.valueOf(principal)); Preconditions.checkNotNull(finalPrincipal, "Resolved principal must not be null"); Preconditions.checkArgument(keytab != null && !keytab.isEmpty(), "Invalid Kerberos keytab: " + String.valueOf(keytab)); File keytabFile = new File(keytab); Preconditions.checkArgument(keytabFile.isFile() && keytabFile.canRead(), "Keytab is not a readable file: " + String.valueOf(keytab)); try { // attempt static kerberos login LOG.debug("Logging in as {} with {}", finalPrincipal, keytab); UserGroupInformation.loginUserFromKeytab(principal, keytab); return UserGroupInformation.getLoginUser(); } catch (IOException e) { throw new SecurityException("Kerberos login failed", e); } } }
public class class_name { public static synchronized UserGroupInformation login(String principal, String keytab) { // resolve the requested principal, if it is present String finalPrincipal = null; if (principal != null && !principal.isEmpty()) { try { // resolves _HOST pattern using standard Hadoop search/replace // via DNS lookup when 2nd argument is empty finalPrincipal = SecurityUtil.getServerPrincipal(principal, ""); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new SecurityException( "Failed to resolve Kerberos principal", e); } // depends on control dependency: [catch], data = [none] } // check if there is a user already logged in UserGroupInformation currentUser = null; try { currentUser = UserGroupInformation.getLoginUser(); // depends on control dependency: [try], data = [none] } catch (IOException e) { // not a big deal but this shouldn't typically happen because it will // generally fall back to the UNIX user LOG.debug("Unable to get login user before Kerberos auth attempt", e); } // depends on control dependency: [catch], data = [none] // if the current user is valid (matches the given principal) then use it if (currentUser != null) { if (finalPrincipal == null || finalPrincipal.equals(currentUser.getUserName())) { LOG.debug("Using existing login for {}: {}", finalPrincipal, currentUser); // depends on control dependency: [if], data = [none] return currentUser; // depends on control dependency: [if], data = [none] } else { // be cruel and unusual when user tries to login as multiple principals // this isn't really valid with a reconfigure but this should be rare // enough to warrant a restart of the agent JVM // TODO: find a way to interrogate the entire current config state, // since we don't have to be unnecessarily protective if they switch all // HDFS sinks to use a different principal all at once. 
throw new SecurityException( "Cannot use multiple Kerberos principals: " + finalPrincipal + " would replace " + currentUser.getUserName()); } } // prepare for a new login Preconditions.checkArgument(principal != null && !principal.isEmpty(), "Invalid Kerberos principal: " + String.valueOf(principal)); Preconditions.checkNotNull(finalPrincipal, "Resolved principal must not be null"); Preconditions.checkArgument(keytab != null && !keytab.isEmpty(), "Invalid Kerberos keytab: " + String.valueOf(keytab)); File keytabFile = new File(keytab); Preconditions.checkArgument(keytabFile.isFile() && keytabFile.canRead(), "Keytab is not a readable file: " + String.valueOf(keytab)); try { // attempt static kerberos login LOG.debug("Logging in as {} with {}", finalPrincipal, keytab); // depends on control dependency: [try], data = [none] UserGroupInformation.loginUserFromKeytab(principal, keytab); // depends on control dependency: [try], data = [none] return UserGroupInformation.getLoginUser(); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new SecurityException("Kerberos login failed", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { Item newHandleItem(final int tag, final String owner, final String name, final String desc, final boolean itf) { key4.set(HANDLE_BASE + tag, owner, name, desc); Item result = get(key4); if (result == null) { if (tag <= Opcodes.H_PUTSTATIC) { put112(HANDLE, tag, newField(owner, name, desc)); } else { put112(HANDLE, tag, newMethod(owner, name, desc, itf)); } result = new Item(index++, key4); put(result); } return result; } }
public class class_name { Item newHandleItem(final int tag, final String owner, final String name, final String desc, final boolean itf) { key4.set(HANDLE_BASE + tag, owner, name, desc); Item result = get(key4); if (result == null) { if (tag <= Opcodes.H_PUTSTATIC) { put112(HANDLE, tag, newField(owner, name, desc)); // depends on control dependency: [if], data = [none] } else { put112(HANDLE, tag, newMethod(owner, name, desc, itf)); // depends on control dependency: [if], data = [none] } result = new Item(index++, key4); // depends on control dependency: [if], data = [none] put(result); // depends on control dependency: [if], data = [(result] } return result; } }
public class class_name { public final EObject ruleJvmParameterizedTypeReference() throws RecognitionException { EObject current = null; Token otherlv_1=null; Token otherlv_3=null; Token otherlv_5=null; Token otherlv_7=null; Token otherlv_9=null; Token otherlv_11=null; Token otherlv_13=null; EObject lv_arguments_2_0 = null; EObject lv_arguments_4_0 = null; EObject lv_arguments_10_0 = null; EObject lv_arguments_12_0 = null; enterRule(); try { // InternalXbase.g:5611:2: ( ( ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? ) ) // InternalXbase.g:5612:2: ( ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? ) { // InternalXbase.g:5612:2: ( ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? 
) // InternalXbase.g:5613:3: ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? { // InternalXbase.g:5613:3: ( ( ruleQualifiedName ) ) // InternalXbase.g:5614:4: ( ruleQualifiedName ) { // InternalXbase.g:5614:4: ( ruleQualifiedName ) // InternalXbase.g:5615:5: ruleQualifiedName { if ( state.backtracking==0 ) { if (current==null) { current = createModelElement(grammarAccess.getJvmParameterizedTypeReferenceRule()); } } if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getTypeJvmTypeCrossReference_0_0()); } pushFollow(FOLLOW_74); ruleQualifiedName(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { afterParserOrEnumRuleCall(); } } } // InternalXbase.g:5629:3: ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? int alt101=2; alt101 = dfa101.predict(input); switch (alt101) { case 1 : // InternalXbase.g:5630:4: ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' 
) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* { // InternalXbase.g:5630:4: ( ( '<' )=>otherlv_1= '<' ) // InternalXbase.g:5631:5: ( '<' )=>otherlv_1= '<' { otherlv_1=(Token)match(input,19,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_1, grammarAccess.getJvmParameterizedTypeReferenceAccess().getLessThanSignKeyword_1_0()); } } // InternalXbase.g:5637:4: ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5638:5: (lv_arguments_2_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5638:5: (lv_arguments_2_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5639:6: lv_arguments_2_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_1_0()); } pushFollow(FOLLOW_26); lv_arguments_2_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); } add( current, "arguments", lv_arguments_2_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); afterParserOrEnumRuleCall(); } } } // InternalXbase.g:5656:4: (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* loop97: do { int alt97=2; int LA97_0 = input.LA(1); if ( (LA97_0==48) ) { alt97=1; } switch (alt97) { case 1 : // InternalXbase.g:5657:5: otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) { otherlv_3=(Token)match(input,48,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_3, grammarAccess.getJvmParameterizedTypeReferenceAccess().getCommaKeyword_1_2_0()); } // InternalXbase.g:5661:5: ( (lv_arguments_4_0= 
ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5662:6: (lv_arguments_4_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5662:6: (lv_arguments_4_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5663:7: lv_arguments_4_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_2_1_0()); } pushFollow(FOLLOW_26); lv_arguments_4_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); } add( current, "arguments", lv_arguments_4_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); afterParserOrEnumRuleCall(); } } } } break; default : break loop97; } } while (true); otherlv_5=(Token)match(input,20,FOLLOW_70); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_5, grammarAccess.getJvmParameterizedTypeReferenceAccess().getGreaterThanSignKeyword_1_3()); } // InternalXbase.g:5685:4: ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* loop100: do { int alt100=2; int LA100_0 = input.LA(1); if ( (LA100_0==45) ) { int LA100_2 = input.LA(2); if ( (LA100_2==RULE_ID) ) { int LA100_3 = input.LA(3); if ( (synpred45_InternalXbase()) ) { alt100=1; } } } switch (alt100) { case 1 : // InternalXbase.g:5686:5: ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? { // InternalXbase.g:5686:5: ( ( ( () '.' ) )=> ( () otherlv_7= '.' 
) ) // InternalXbase.g:5687:6: ( ( () '.' ) )=> ( () otherlv_7= '.' ) { // InternalXbase.g:5693:6: ( () otherlv_7= '.' ) // InternalXbase.g:5694:7: () otherlv_7= '.' { // InternalXbase.g:5694:7: () // InternalXbase.g:5695:8: { if ( state.backtracking==0 ) { current = forceCreateModelElementAndSet( grammarAccess.getJvmParameterizedTypeReferenceAccess().getJvmInnerTypeReferenceOuterAction_1_4_0_0_0(), current); } } otherlv_7=(Token)match(input,45,FOLLOW_61); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_7, grammarAccess.getJvmParameterizedTypeReferenceAccess().getFullStopKeyword_1_4_0_0_1()); } } } // InternalXbase.g:5707:5: ( ( ruleValidID ) ) // InternalXbase.g:5708:6: ( ruleValidID ) { // InternalXbase.g:5708:6: ( ruleValidID ) // InternalXbase.g:5709:7: ruleValidID { if ( state.backtracking==0 ) { if (current==null) { current = createModelElement(grammarAccess.getJvmParameterizedTypeReferenceRule()); } } if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getTypeJvmTypeCrossReference_1_4_1_0()); } pushFollow(FOLLOW_75); ruleValidID(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { afterParserOrEnumRuleCall(); } } } // InternalXbase.g:5723:5: ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? 
int alt99=2; alt99 = dfa99.predict(input); switch (alt99) { case 1 : // InternalXbase.g:5724:6: ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' { // InternalXbase.g:5724:6: ( ( '<' )=>otherlv_9= '<' ) // InternalXbase.g:5725:7: ( '<' )=>otherlv_9= '<' { otherlv_9=(Token)match(input,19,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_9, grammarAccess.getJvmParameterizedTypeReferenceAccess().getLessThanSignKeyword_1_4_2_0()); } } // InternalXbase.g:5731:6: ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5732:7: (lv_arguments_10_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5732:7: (lv_arguments_10_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5733:8: lv_arguments_10_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_1_0()); } pushFollow(FOLLOW_26); lv_arguments_10_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); } add( current, "arguments", lv_arguments_10_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); afterParserOrEnumRuleCall(); } } } // InternalXbase.g:5750:6: (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* loop98: do { int alt98=2; int LA98_0 = input.LA(1); if ( (LA98_0==48) ) { alt98=1; } switch (alt98) { case 1 : // InternalXbase.g:5751:7: otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) { otherlv_11=(Token)match(input,48,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_11, 
grammarAccess.getJvmParameterizedTypeReferenceAccess().getCommaKeyword_1_4_2_2_0()); } // InternalXbase.g:5755:7: ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5756:8: (lv_arguments_12_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5756:8: (lv_arguments_12_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5757:9: lv_arguments_12_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_2_1_0()); } pushFollow(FOLLOW_26); lv_arguments_12_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); } add( current, "arguments", lv_arguments_12_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); afterParserOrEnumRuleCall(); } } } } break; default : break loop98; } } while (true); otherlv_13=(Token)match(input,20,FOLLOW_70); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_13, grammarAccess.getJvmParameterizedTypeReferenceAccess().getGreaterThanSignKeyword_1_4_2_3()); } } break; } } break; default : break loop100; } } while (true); } break; } } } if ( state.backtracking==0 ) { leaveRule(); } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
// ANTLR/Xtext GENERATED parser rule for JvmParameterizedTypeReference (grammar InternalXbase.g,
// roughly lines 5611-5760): parses a qualified type name, an optional <...> type-argument list,
// and repeated ".Inner<...>" segments for inner-type references. Do not hand-edit generated code;
// regenerate from the grammar instead.
// NOTE(review): the inline "// depends on control dependency" markers appear to be dataset
// annotations added by a tool, not functional comments from the generator.
public class class_name { public final EObject ruleJvmParameterizedTypeReference() throws RecognitionException { EObject current = null; Token otherlv_1=null; Token otherlv_3=null; Token otherlv_5=null; Token otherlv_7=null; Token otherlv_9=null; Token otherlv_11=null; Token otherlv_13=null; EObject lv_arguments_2_0 = null; EObject lv_arguments_4_0 = null; EObject lv_arguments_10_0 = null; EObject lv_arguments_12_0 = null; enterRule(); try { // InternalXbase.g:5611:2: ( ( ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? ) ) // InternalXbase.g:5612:2: ( ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? ) { // InternalXbase.g:5612:2: ( ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? 
) // InternalXbase.g:5613:3: ( ( ruleQualifiedName ) ) ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? { // InternalXbase.g:5613:3: ( ( ruleQualifiedName ) ) // InternalXbase.g:5614:4: ( ruleQualifiedName ) { // InternalXbase.g:5614:4: ( ruleQualifiedName ) // InternalXbase.g:5615:5: ruleQualifiedName { if ( state.backtracking==0 ) { if (current==null) { current = createModelElement(grammarAccess.getJvmParameterizedTypeReferenceRule()); // depends on control dependency: [if], data = [none] } } if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getTypeJvmTypeCrossReference_0_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_74); ruleQualifiedName(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } // InternalXbase.g:5629:3: ( ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* )? 
int alt101=2; alt101 = dfa101.predict(input); switch (alt101) { case 1 : // InternalXbase.g:5630:4: ( ( '<' )=>otherlv_1= '<' ) ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* otherlv_5= '>' ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* { // InternalXbase.g:5630:4: ( ( '<' )=>otherlv_1= '<' ) // InternalXbase.g:5631:5: ( '<' )=>otherlv_1= '<' { otherlv_1=(Token)match(input,19,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_1, grammarAccess.getJvmParameterizedTypeReferenceAccess().getLessThanSignKeyword_1_0()); // depends on control dependency: [if], data = [none] } } // InternalXbase.g:5637:4: ( (lv_arguments_2_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5638:5: (lv_arguments_2_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5638:5: (lv_arguments_2_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5639:6: lv_arguments_2_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_26); lv_arguments_2_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); // depends on control dependency: [if], data = [none] } add( current, "arguments", lv_arguments_2_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } 
} } // InternalXbase.g:5656:4: (otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) )* loop97: do { int alt97=2; int LA97_0 = input.LA(1); if ( (LA97_0==48) ) { alt97=1; // depends on control dependency: [if], data = [none] } switch (alt97) { case 1 : // InternalXbase.g:5657:5: otherlv_3= ',' ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) { otherlv_3=(Token)match(input,48,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_3, grammarAccess.getJvmParameterizedTypeReferenceAccess().getCommaKeyword_1_2_0()); // depends on control dependency: [if], data = [none] } // InternalXbase.g:5661:5: ( (lv_arguments_4_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5662:6: (lv_arguments_4_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5662:6: (lv_arguments_4_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5663:7: lv_arguments_4_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_2_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_26); lv_arguments_4_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); // depends on control dependency: [if], data = [none] } add( current, "arguments", lv_arguments_4_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } } break; default : break loop97; } } while (true); otherlv_5=(Token)match(input,20,FOLLOW_70); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_5, 
grammarAccess.getJvmParameterizedTypeReferenceAccess().getGreaterThanSignKeyword_1_3()); // depends on control dependency: [if], data = [none] } // InternalXbase.g:5685:4: ( ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? )* loop100: do { int alt100=2; int LA100_0 = input.LA(1); if ( (LA100_0==45) ) { int LA100_2 = input.LA(2); if ( (LA100_2==RULE_ID) ) { int LA100_3 = input.LA(3); if ( (synpred45_InternalXbase()) ) { alt100=1; // depends on control dependency: [if], data = [none] } } } switch (alt100) { case 1 : // InternalXbase.g:5686:5: ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) ( ( ruleValidID ) ) ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? { // InternalXbase.g:5686:5: ( ( ( () '.' ) )=> ( () otherlv_7= '.' ) ) // InternalXbase.g:5687:6: ( ( () '.' ) )=> ( () otherlv_7= '.' ) { // InternalXbase.g:5693:6: ( () otherlv_7= '.' ) // InternalXbase.g:5694:7: () otherlv_7= '.' 
{ // InternalXbase.g:5694:7: () // InternalXbase.g:5695:8: { if ( state.backtracking==0 ) { current = forceCreateModelElementAndSet( grammarAccess.getJvmParameterizedTypeReferenceAccess().getJvmInnerTypeReferenceOuterAction_1_4_0_0_0(), current); // depends on control dependency: [if], data = [none] } } otherlv_7=(Token)match(input,45,FOLLOW_61); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_7, grammarAccess.getJvmParameterizedTypeReferenceAccess().getFullStopKeyword_1_4_0_0_1()); // depends on control dependency: [if], data = [none] } } } // InternalXbase.g:5707:5: ( ( ruleValidID ) ) // InternalXbase.g:5708:6: ( ruleValidID ) { // InternalXbase.g:5708:6: ( ruleValidID ) // InternalXbase.g:5709:7: ruleValidID { if ( state.backtracking==0 ) { if (current==null) { current = createModelElement(grammarAccess.getJvmParameterizedTypeReferenceRule()); // depends on control dependency: [if], data = [none] } } if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getTypeJvmTypeCrossReference_1_4_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_75); ruleValidID(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } // InternalXbase.g:5723:5: ( ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' )? 
int alt99=2; alt99 = dfa99.predict(input); switch (alt99) { case 1 : // InternalXbase.g:5724:6: ( ( '<' )=>otherlv_9= '<' ) ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* otherlv_13= '>' { // InternalXbase.g:5724:6: ( ( '<' )=>otherlv_9= '<' ) // InternalXbase.g:5725:7: ( '<' )=>otherlv_9= '<' { otherlv_9=(Token)match(input,19,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_9, grammarAccess.getJvmParameterizedTypeReferenceAccess().getLessThanSignKeyword_1_4_2_0()); // depends on control dependency: [if], data = [none] } } // InternalXbase.g:5731:6: ( (lv_arguments_10_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5732:7: (lv_arguments_10_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5732:7: (lv_arguments_10_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5733:8: lv_arguments_10_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_26); lv_arguments_10_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); // depends on control dependency: [if], data = [none] } add( current, "arguments", lv_arguments_10_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } // InternalXbase.g:5750:6: (otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) )* loop98: do { int alt98=2; int LA98_0 = input.LA(1); if ( (LA98_0==48) ) { alt98=1; // depends on control dependency: [if], 
data = [none] } switch (alt98) { case 1 : // InternalXbase.g:5751:7: otherlv_11= ',' ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) { otherlv_11=(Token)match(input,48,FOLLOW_25); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_11, grammarAccess.getJvmParameterizedTypeReferenceAccess().getCommaKeyword_1_4_2_2_0()); // depends on control dependency: [if], data = [none] } // InternalXbase.g:5755:7: ( (lv_arguments_12_0= ruleJvmArgumentTypeReference ) ) // InternalXbase.g:5756:8: (lv_arguments_12_0= ruleJvmArgumentTypeReference ) { // InternalXbase.g:5756:8: (lv_arguments_12_0= ruleJvmArgumentTypeReference ) // InternalXbase.g:5757:9: lv_arguments_12_0= ruleJvmArgumentTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getJvmParameterizedTypeReferenceAccess().getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_2_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_26); lv_arguments_12_0=ruleJvmArgumentTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getJvmParameterizedTypeReferenceRule()); // depends on control dependency: [if], data = [none] } add( current, "arguments", lv_arguments_12_0, "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference"); // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } } break; default : break loop98; } } while (true); otherlv_13=(Token)match(input,20,FOLLOW_70); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_13, grammarAccess.getJvmParameterizedTypeReferenceAccess().getGreaterThanSignKeyword_1_4_2_3()); // depends on control dependency: [if], data = [none] } } break; } } break; default : break loop100; } } while (true); } break; } } } if ( state.backtracking==0 ) { leaveRule(); // depends on 
control dependency: [if], data = [none] } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name {
    /**
     * Fully parses {@code text} and uses {@code query} to extract the result.
     *
     * @param text  the character sequence to parse, not null
     * @param query the query defining the type of the result, not null
     * @return the parsed value, as produced by the query
     * @throws DateTimeParseException if the text cannot be parsed
     */
    public <T> T parse(CharSequence text, TemporalQuery<T> query) {
        // Validate arguments in the same order so the NPE message stays stable.
        Objects.requireNonNull(text, "text");
        Objects.requireNonNull(query, "query");
        try {
            return parseResolved0(text, null).query(query);
        } catch (DateTimeParseException ex) {
            // Already a parse error carrying full context — propagate unchanged.
            throw ex;
        } catch (RuntimeException ex) {
            // Any other runtime failure is wrapped together with the offending input.
            throw createError(text, ex);
        }
    }
}
// Annotated duplicate of parse(CharSequence, TemporalQuery): parses text and extracts a value
// via the query; parse errors are rethrown as-is, other runtime errors are wrapped by createError.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations, not docs.
public class class_name { public <T> T parse(CharSequence text, TemporalQuery<T> query) { Objects.requireNonNull(text, "text"); Objects.requireNonNull(query, "query"); try { return parseResolved0(text, null).query(query); // depends on control dependency: [try], data = [none] } catch (DateTimeParseException ex) { throw ex; } catch (RuntimeException ex) { // depends on control dependency: [catch], data = [none] throw createError(text, ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Debug-logs the given bootstrap info: one log line per profile, or a
     * marker line when the profile list is null. A null {@code bsInfo} is
     * ignored silently.
     */
    void printBootstrapInfo(BootstrapInfo bsInfo) {
        if (bsInfo == null) {
            return;
        }
        Log.d(LOGTAG, "printBootstrapInfo");
        final List<BootstrapProfile> profiles = bsInfo.getProfiles();
        if (profiles == null) {
            Log.d(LOGTAG, "Profiles are null");
            return;
        }
        for (BootstrapProfile profile : profiles) {
            Log.d(LOGTAG, profile.toString());
        }
    }
}
// Annotated duplicate of printBootstrapInfo: debug-logs each bootstrap profile, or a marker
// line when the profile list is null; a null bsInfo is ignored.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { void printBootstrapInfo(BootstrapInfo bsInfo) { if (bsInfo == null) return; Log.d(LOGTAG, "printBootstrapInfo"); List<BootstrapProfile> profiles = bsInfo.getProfiles(); if (profiles != null) { for (BootstrapProfile profile : profiles) { Log.d(LOGTAG, profile.toString()); // depends on control dependency: [for], data = [profile] } } else { Log.d(LOGTAG, "Profiles are null"); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the simple name of the declaring type for a member element, or
     * null for constructors and for elements that are not members at all.
     */
    protected String getDeclaratorSimpleName(JvmIdentifiableElement element) {
        // Constructors are excluded explicitly even though they are members.
        if (!(element instanceof JvmConstructor) && element instanceof JvmMember) {
            return ((JvmMember) element).getDeclaringType().getSimpleName();
        }
        return null;
    }
}
// Annotated duplicate of getDeclaratorSimpleName: simple name of the declaring type for
// non-constructor members, otherwise null.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { protected String getDeclaratorSimpleName(JvmIdentifiableElement element) { if (element instanceof JvmConstructor) { return null; // depends on control dependency: [if], data = [none] } if (element instanceof JvmMember) { return ((JvmMember) element).getDeclaringType().getSimpleName(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { private void findMissingParityFiles(String[] args, int startIndex) { boolean restoreReplication = false; Path root = null; for (int i = startIndex; i < args.length; i++) { String arg = args[i]; if (arg.equals("-r")) { restoreReplication = true; } else { root = new Path(arg); } } if (root == null) { throw new IllegalArgumentException("Too few arguments"); } try { FileSystem fs = root.getFileSystem(conf); // Make sure default uri is the same as root conf.set(FileSystem.FS_DEFAULT_NAME_KEY, fs.getUri().toString()); MissingParityFiles mParFiles = new MissingParityFiles(conf, restoreReplication); mParFiles.findMissingParityFiles(root, System.out); } catch (IOException ex) { System.err.println("findMissingParityFiles: " + ex); } } }
// Annotated duplicate of findMissingParityFiles: parses "-r" and a root path from args,
// then reports missing parity files under root; IOExceptions go to System.err.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { private void findMissingParityFiles(String[] args, int startIndex) { boolean restoreReplication = false; Path root = null; for (int i = startIndex; i < args.length; i++) { String arg = args[i]; if (arg.equals("-r")) { restoreReplication = true; // depends on control dependency: [if], data = [none] } else { root = new Path(arg); // depends on control dependency: [if], data = [none] } } if (root == null) { throw new IllegalArgumentException("Too few arguments"); } try { FileSystem fs = root.getFileSystem(conf); // Make sure default uri is the same as root conf.set(FileSystem.FS_DEFAULT_NAME_KEY, fs.getUri().toString()); // depends on control dependency: [try], data = [none] MissingParityFiles mParFiles = new MissingParityFiles(conf, restoreReplication); mParFiles.findMissingParityFiles(root, System.out); // depends on control dependency: [try], data = [none] } catch (IOException ex) { System.err.println("findMissingParityFiles: " + ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { <T> void finish(Request<T> request) { // Remove from the set of requests currently being processed. synchronized (currentRequests) { currentRequests.remove(request); } if (request.shouldCache()) { synchronized (waitingRequests) { String cacheKey = request.getCacheKey(); Queue<Request<?>> waitingRequests = this.waitingRequests.remove(cacheKey); if (waitingRequests != null) { //todo add queue markers // if (JusLog.DEBUG) { // JusLog.v("Releasing %d waiting requests for cacheKey=%s.", // waitingRequests.size(), cacheKey); // } // Process all queued up requests. They won't be considered as in flight, but // that's not a problem as the cache has been primed by 'request'. cacheQueue.addAll(waitingRequests); } } } } }
// Annotated duplicate of finish(Request): removes the request from the in-flight set and,
// when cacheable, moves any requests waiting on the same cache key onto the cache queue.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { <T> void finish(Request<T> request) { // Remove from the set of requests currently being processed. synchronized (currentRequests) { currentRequests.remove(request); } if (request.shouldCache()) { synchronized (waitingRequests) { // depends on control dependency: [if], data = [none] String cacheKey = request.getCacheKey(); Queue<Request<?>> waitingRequests = this.waitingRequests.remove(cacheKey); if (waitingRequests != null) { //todo add queue markers // if (JusLog.DEBUG) { // JusLog.v("Releasing %d waiting requests for cacheKey=%s.", // waitingRequests.size(), cacheKey); // } // Process all queued up requests. They won't be considered as in flight, but // that's not a problem as the cache has been primed by 'request'. cacheQueue.addAll(waitingRequests); // depends on control dependency: [if], data = [(waitingRequests] } } } } }
public class class_name { public Observable<ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>> listPoolNodeCountsNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } final AccountListPoolNodeCountsNextOptions accountListPoolNodeCountsNextOptions = null; UUID clientRequestId = null; Boolean returnClientRequestId = null; DateTime ocpDate = null; DateTimeRfc1123 ocpDateConverted = null; if (ocpDate != null) { ocpDateConverted = new DateTimeRfc1123(ocpDate); } String nextUrl = String.format("%s", nextPageLink); return service.listPoolNodeCountsNext(nextUrl, this.client.acceptLanguage(), clientRequestId, returnClientRequestId, ocpDateConverted, this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>> call(Response<ResponseBody> response) { try { ServiceResponseWithHeaders<PageImpl<PoolNodeCounts>, AccountListPoolNodeCountsHeaders> result = listPoolNodeCountsNextDelegate(response); return Observable.just(new ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>(result.body(), result.headers(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } }
// Annotated duplicate of listPoolNodeCountsNextSinglePageAsync: generated Azure Batch SDK
// pagination method that calls the next-page service endpoint with all-null optional headers
// and repackages the delegate result as an Observable page.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { public Observable<ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>> listPoolNodeCountsNextSinglePageAsync(final String nextPageLink) { if (nextPageLink == null) { throw new IllegalArgumentException("Parameter nextPageLink is required and cannot be null."); } final AccountListPoolNodeCountsNextOptions accountListPoolNodeCountsNextOptions = null; UUID clientRequestId = null; Boolean returnClientRequestId = null; DateTime ocpDate = null; DateTimeRfc1123 ocpDateConverted = null; if (ocpDate != null) { ocpDateConverted = new DateTimeRfc1123(ocpDate); // depends on control dependency: [if], data = [(ocpDate] } String nextUrl = String.format("%s", nextPageLink); return service.listPoolNodeCountsNext(nextUrl, this.client.acceptLanguage(), clientRequestId, returnClientRequestId, ocpDateConverted, this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>>>() { @Override public Observable<ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>> call(Response<ResponseBody> response) { try { ServiceResponseWithHeaders<PageImpl<PoolNodeCounts>, AccountListPoolNodeCountsHeaders> result = listPoolNodeCountsNextDelegate(response); return Observable.just(new ServiceResponseWithHeaders<Page<PoolNodeCounts>, AccountListPoolNodeCountsHeaders>(result.body(), result.headers(), result.response())); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name {
    /**
     * Serializes the given Resource into the protocol representation, writing
     * its fields in a fixed order (type, name, arn, feature, attributes).
     *
     * @throws SdkClientException if {@code resource} is null or marshalling fails
     */
    public void marshall(Resource resource, ProtocolMarshaller protocolMarshaller) {
        if (resource == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Field order must stay stable for the wire format.
            protocolMarshaller.marshall(resource.getType(), TYPE_BINDING);
            protocolMarshaller.marshall(resource.getName(), NAME_BINDING);
            protocolMarshaller.marshall(resource.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(resource.getFeature(), FEATURE_BINDING);
            protocolMarshaller.marshall(resource.getAttributes(), ATTRIBUTES_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
// Annotated duplicate of marshall(Resource, ProtocolMarshaller): writes type, name, arn,
// feature and attributes bindings; wraps any failure in SdkClientException.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { public void marshall(Resource resource, ProtocolMarshaller protocolMarshaller) { if (resource == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(resource.getType(), TYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(resource.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(resource.getArn(), ARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(resource.getFeature(), FEATURE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(resource.getAttributes(), ATTRIBUTES_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Serializes a GetDeploymentRequest into the protocol representation;
     * only the deployment id is marshalled.
     *
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(GetDeploymentRequest getDeploymentRequest, ProtocolMarshaller protocolMarshaller) {
        if (getDeploymentRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getDeploymentRequest.getDeploymentId(), DEPLOYMENTID_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
// Annotated duplicate of marshall(GetDeploymentRequest, ProtocolMarshaller): marshals only
// the deployment id; wraps any failure in SdkClientException.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { public void marshall(GetDeploymentRequest getDeploymentRequest, ProtocolMarshaller protocolMarshaller) { if (getDeploymentRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getDeploymentRequest.getDeploymentId(), DEPLOYMENTID_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public LCMSDataSubset merge(LCMSDataSubset other) { LCMSDataSubset merged = new LCMSDataSubset(); Set<Integer> msLvlsThis = getMsLvls(); Set<Integer> msLvlsThat = other.getMsLvls(); // only merge if both are not null, otherwise null signifies the whole // run, so we can keep it null in the merged version if (msLvlsThis != null && msLvlsThat != null) { HashSet<Integer> mergedMsLvls = new HashSet<>(msLvlsThis); mergedMsLvls.addAll(msLvlsThat); merged.setMsLvls(mergedMsLvls); } // merge mz ranges List<DoubleRange> mzRangesThis = getMzRanges(); List<DoubleRange> mzRangesThat = other.getMzRanges(); if (mzRangesThis != null && mzRangesThat != null) { ArrayList<DoubleRange> mergedMzRanges = new ArrayList<>(mzRangesThis); mergedMzRanges.addAll(mzRangesThat); merged.setMzRanges(mergedMzRanges); } // compare scan number ranges Integer scanNumLoThis = getScanNumLo(); Integer scanNumLoThat = other.getScanNumLo(); if (scanNumLoThis != null && scanNumLoThat != null) { merged.setScanNumLo(Math.min(scanNumLoThis, scanNumLoThat)); } Integer scanNumHiThis = getScanNumHi(); Integer scanNumHiThat = other.getScanNumHi(); if (scanNumHiThis != null && scanNumHiThat != null) { merged.setScanNumHi(Math.max(scanNumHiThis, scanNumHiThat)); } return merged; } }
// Annotated duplicate of merge(LCMSDataSubset): unions MS levels, concatenates m/z ranges and
// widens the scan-number window, but only for components that are non-null on BOTH sides
// (null signifies the whole run).
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { public LCMSDataSubset merge(LCMSDataSubset other) { LCMSDataSubset merged = new LCMSDataSubset(); Set<Integer> msLvlsThis = getMsLvls(); Set<Integer> msLvlsThat = other.getMsLvls(); // only merge if both are not null, otherwise null signifies the whole // run, so we can keep it null in the merged version if (msLvlsThis != null && msLvlsThat != null) { HashSet<Integer> mergedMsLvls = new HashSet<>(msLvlsThis); mergedMsLvls.addAll(msLvlsThat); // depends on control dependency: [if], data = [none] merged.setMsLvls(mergedMsLvls); // depends on control dependency: [if], data = [none] } // merge mz ranges List<DoubleRange> mzRangesThis = getMzRanges(); List<DoubleRange> mzRangesThat = other.getMzRanges(); if (mzRangesThis != null && mzRangesThat != null) { ArrayList<DoubleRange> mergedMzRanges = new ArrayList<>(mzRangesThis); mergedMzRanges.addAll(mzRangesThat); // depends on control dependency: [if], data = [none] merged.setMzRanges(mergedMzRanges); // depends on control dependency: [if], data = [none] } // compare scan number ranges Integer scanNumLoThis = getScanNumLo(); Integer scanNumLoThat = other.getScanNumLo(); if (scanNumLoThis != null && scanNumLoThat != null) { merged.setScanNumLo(Math.min(scanNumLoThis, scanNumLoThat)); // depends on control dependency: [if], data = [(scanNumLoThis] } Integer scanNumHiThis = getScanNumHi(); Integer scanNumHiThat = other.getScanNumHi(); if (scanNumHiThis != null && scanNumHiThat != null) { merged.setScanNumHi(Math.max(scanNumHiThis, scanNumHiThat)); // depends on control dependency: [if], data = [(scanNumHiThis] } return merged; } }
/**
 * Allocates a SimpleSlot for a task bound to a co-location constraint, under {@code lock}.
 * Three cases:
 *  1. constraint assigned and alive: carve a LOCAL sub-slot out of the existing shared slot;
 *  2. constraint assigned but its slot died: try to get a new shared slot on the SAME
 *     TaskManager (strict locality), nest a constraint-group slot inside it, and hand out a
 *     LOCAL sub-slot; returns null if no such slot is available;
 *  3. constraint never assigned: pick a shared slot honoring the given location preferences,
 *     nest a constraint-group slot, and hand out a sub-slot tagged with the achieved locality.
 * NOTE(review): in case 3 the location is deliberately NOT locked yet — the caller may still
 * discard the returned slot (see inline comment).
 */
public class class_name { public SimpleSlot getSlotForTask(CoLocationConstraint constraint, Iterable<TaskManagerLocation> locationPreferences) { synchronized (lock) { if (constraint.isAssignedAndAlive()) { // the shared slot of the co-location group is initialized and set we allocate a sub-slot final SharedSlot shared = constraint.getSharedSlot(); SimpleSlot subslot = shared.allocateSubSlot(null); subslot.setLocality(Locality.LOCAL); return subslot; } else if (constraint.isAssigned()) { // we had an assignment before. SharedSlot previous = constraint.getSharedSlot(); if (previous == null) { throw new IllegalStateException("Bug: Found assigned co-location constraint without a slot."); } TaskManagerLocation location = previous.getTaskManagerLocation(); Tuple2<SharedSlot, Locality> p = getSharedSlotForTask( constraint.getGroupId(), Collections.singleton(location), true); if (p == null) { return null; } else { SharedSlot newSharedSlot = p.f0; // allocate the co-location group slot inside the shared slot SharedSlot constraintGroupSlot = newSharedSlot.allocateSharedSlot(constraint.getGroupId()); if (constraintGroupSlot != null) { constraint.setSharedSlot(constraintGroupSlot); // the sub slots in the co location constraint slot have no group that they belong to // (other than the co-location-constraint slot) SimpleSlot subSlot = constraintGroupSlot.allocateSubSlot(null); subSlot.setLocality(Locality.LOCAL); return subSlot; } else { // could not allocate the co-location-constraint shared slot return null; } } } else { // the location constraint has not been associated with a shared slot, yet. // grab a new slot and initialize the constraint with that one. 
// preferred locations are defined by the vertex Tuple2<SharedSlot, Locality> p = getSharedSlotForTask(constraint.getGroupId(), locationPreferences, false); if (p == null) { // could not get a shared slot for this co-location-group return null; } else { final SharedSlot availableShared = p.f0; final Locality l = p.f1; // allocate the co-location group slot inside the shared slot SharedSlot constraintGroupSlot = availableShared.allocateSharedSlot(constraint.getGroupId()); // IMPORTANT: We do not lock the location, yet, since we cannot be sure that the // caller really sticks with the slot we picked! constraint.setSharedSlot(constraintGroupSlot); // the sub slots in the co location constraint slot have no group that they belong to // (other than the co-location-constraint slot) SimpleSlot sub = constraintGroupSlot.allocateSubSlot(null); sub.setLocality(l); return sub; } } } } }
// Annotated duplicate of getSlotForTask(CoLocationConstraint, Iterable<TaskManagerLocation>):
// under the scheduler lock, allocates a sub-slot from the constraint's live shared slot,
// re-acquires a slot on the same TaskManager if the old one died, or picks a fresh shared slot
// from the location preferences for an unassigned constraint.
// NOTE(review): the "// depends on control dependency" markers are dataset annotations.
public class class_name { public SimpleSlot getSlotForTask(CoLocationConstraint constraint, Iterable<TaskManagerLocation> locationPreferences) { synchronized (lock) { if (constraint.isAssignedAndAlive()) { // the shared slot of the co-location group is initialized and set we allocate a sub-slot final SharedSlot shared = constraint.getSharedSlot(); SimpleSlot subslot = shared.allocateSubSlot(null); subslot.setLocality(Locality.LOCAL); // depends on control dependency: [if], data = [none] return subslot; // depends on control dependency: [if], data = [none] } else if (constraint.isAssigned()) { // we had an assignment before. SharedSlot previous = constraint.getSharedSlot(); if (previous == null) { throw new IllegalStateException("Bug: Found assigned co-location constraint without a slot."); } TaskManagerLocation location = previous.getTaskManagerLocation(); Tuple2<SharedSlot, Locality> p = getSharedSlotForTask( constraint.getGroupId(), Collections.singleton(location), true); if (p == null) { return null; // depends on control dependency: [if], data = [none] } else { SharedSlot newSharedSlot = p.f0; // allocate the co-location group slot inside the shared slot SharedSlot constraintGroupSlot = newSharedSlot.allocateSharedSlot(constraint.getGroupId()); if (constraintGroupSlot != null) { constraint.setSharedSlot(constraintGroupSlot); // depends on control dependency: [if], data = [(constraintGroupSlot] // the sub slots in the co location constraint slot have no group that they belong to // (other than the co-location-constraint slot) SimpleSlot subSlot = constraintGroupSlot.allocateSubSlot(null); subSlot.setLocality(Locality.LOCAL); // depends on control dependency: [if], data = [none] return subSlot; // depends on control dependency: [if], data = [none] } else { // could not allocate the co-location-constraint shared slot return null; // depends on control dependency: [if], data = [none] } } } else { // the location constraint has not been associated with a shared 
slot, yet. // grab a new slot and initialize the constraint with that one. // preferred locations are defined by the vertex Tuple2<SharedSlot, Locality> p = getSharedSlotForTask(constraint.getGroupId(), locationPreferences, false); if (p == null) { // could not get a shared slot for this co-location-group return null; // depends on control dependency: [if], data = [none] } else { final SharedSlot availableShared = p.f0; final Locality l = p.f1; // allocate the co-location group slot inside the shared slot SharedSlot constraintGroupSlot = availableShared.allocateSharedSlot(constraint.getGroupId()); // IMPORTANT: We do not lock the location, yet, since we cannot be sure that the // caller really sticks with the slot we picked! constraint.setSharedSlot(constraintGroupSlot); // depends on control dependency: [if], data = [none] // the sub slots in the co location constraint slot have no group that they belong to // (other than the co-location-constraint slot) SimpleSlot sub = constraintGroupSlot.allocateSubSlot(null); sub.setLocality(l); // depends on control dependency: [if], data = [none] return sub; // depends on control dependency: [if], data = [none] } } } } }
public class class_name {
    /**
     * Obtains a Duration for the given components, reusing the shared ZERO
     * constant when both components are zero. The original bit-trick
     * {@code (seconds | nanoAdjustment) == 0} is equivalent to the explicit
     * two-term comparison written below.
     */
    private static Duration create(long seconds, int nanoAdjustment) {
        if (seconds == 0 && nanoAdjustment == 0) {
            return ZERO;
        }
        return new Duration(seconds, nanoAdjustment);
    }
}
// Annotated duplicate of Duration.create: returns the shared ZERO instance when both
// components are zero (the bitwise OR is zero iff both are), otherwise a new Duration.
// NOTE(review): the "// depends on control dependency" marker is a dataset annotation.
public class class_name { private static Duration create(long seconds, int nanoAdjustment) { if ((seconds | nanoAdjustment) == 0) { return ZERO; // depends on control dependency: [if], data = [none] } return new Duration(seconds, nanoAdjustment); } }
public class class_name {
    /**
     * Resolves the debug flag: a reference delegates to the referenced
     * ProcessorDef; an explicit local value wins next; otherwise the chain of
     * default providers is consulted (advancing the index); false if nothing
     * in the chain decides.
     */
    public boolean getDebug(final ProcessorDef[] defaultProviders, final int index) {
        if (isReference()) {
            ProcessorDef referenced = (ProcessorDef) getCheckedRef(ProcessorDef.class, "ProcessorDef");
            return referenced.getDebug(defaultProviders, index);
        }
        if (this.debug != null) {
            return this.debug.booleanValue();
        }
        if (defaultProviders != null && index < defaultProviders.length) {
            // Walk the provider chain, moving to the next provider.
            return defaultProviders[index].getDebug(defaultProviders, index + 1);
        }
        return false;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant of getDebug: resolves the flag from the
     * referenced definition, the local value, or the default-provider chain,
     * in that order. Trailing comments record control/data dependencies.
     */
    public boolean getDebug(final ProcessorDef[] defaultProviders, final int index) {
        if (isReference()) {
            return ((ProcessorDef) getCheckedRef(ProcessorDef.class, "ProcessorDef")).getDebug(defaultProviders, index); // depends on control dependency: [if], data = [none]
        }
        if (this.debug != null) {
            return this.debug.booleanValue(); // depends on control dependency: [if], data = [none]
        } else {
            if (defaultProviders != null && index < defaultProviders.length) {
                return defaultProviders[index].getDebug(defaultProviders, index + 1); // depends on control dependency: [if], data = [(defaultProviders]
            }
        }
        return false;
    }
}
public class class_name {
    /**
     * Lazily materializes the backing list on first access so callers never
     * observe a null collection.
     */
    public java.util.List<DeleteLaunchTemplateVersionsResponseSuccessItem> getSuccessfullyDeletedLaunchTemplateVersions() {
        if (successfullyDeletedLaunchTemplateVersions != null) {
            return successfullyDeletedLaunchTemplateVersions;
        }
        // First access: create the SDK-internal list and cache it.
        successfullyDeletedLaunchTemplateVersions =
                new com.amazonaws.internal.SdkInternalList<DeleteLaunchTemplateVersionsResponseSuccessItem>();
        return successfullyDeletedLaunchTemplateVersions;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: lazily creates the backing
     * SdkInternalList on first access so the getter never returns null.
     */
    public java.util.List<DeleteLaunchTemplateVersionsResponseSuccessItem> getSuccessfullyDeletedLaunchTemplateVersions() {
        if (successfullyDeletedLaunchTemplateVersions == null) {
            successfullyDeletedLaunchTemplateVersions = new com.amazonaws.internal.SdkInternalList<DeleteLaunchTemplateVersionsResponseSuccessItem>(); // depends on control dependency: [if], data = [none]
        }
        return successfullyDeletedLaunchTemplateVersions;
    }
}
public class class_name {
    /**
     * Installs the listener on the default back-pressed handler when one is
     * active; non-default handlers are left untouched. Fluent: returns this.
     */
    public ChooserDialog withOnBackPressedListener(OnBackPressedListener listener) {
        final Object handler = this._onBackPressed;
        if (handler instanceof defBackPressed) {
            ((defBackPressed) handler)._onBackPressed = listener;
        }
        return this;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: installs the listener on the default
     * back-pressed handler when one is in place; returns this for chaining.
     */
    public ChooserDialog withOnBackPressedListener(OnBackPressedListener listener) {
        if (this._onBackPressed instanceof defBackPressed) {
            ((defBackPressed) this._onBackPressed)._onBackPressed = listener; // depends on control dependency: [if], data = [none]
        }
        return this;
    }
}
public class class_name {
    /**
     * Renders this subject code in IPTC notation,
     * "IPTC:&lt;number&gt;:&lt;top&gt;:&lt;mid&gt;:&lt;detail&gt;", where the
     * populated segments depend on whether the number denotes a top-level
     * ("...000000"), mid-level ("...000") or detail entry.
     */
    public String toIPTC(SubjectReferenceSystem srs) {
        final String number = getNumber();
        final StringBuffer out = new StringBuffer("IPTC:").append(number).append(":");
        if (number.endsWith("000000")) {
            // Top-level entry: only its own name; mid and detail stay empty.
            out.append(toIPTCHelper(srs.getName(this))).append("::");
        } else if (number.endsWith("000")) {
            // Mid-level entry: parent top-level name, then its own name.
            out.append(toIPTCHelper(srs.getName(srs.get(number.substring(0, 2) + "000000"))));
            out.append(":");
            out.append(toIPTCHelper(srs.getName(this)));
            out.append(":");
        } else {
            // Detail entry: top-level and mid-level ancestors, then its own name.
            out.append(toIPTCHelper(srs.getName(srs.get(number.substring(0, 2) + "000000"))));
            out.append(":");
            out.append(toIPTCHelper(srs.getName(srs.get(number.substring(0, 5) + "000"))));
            out.append(":");
            out.append(toIPTCHelper(srs.getName(this)));
        }
        return out.toString();
    }
}
public class class_name {
    /**
     * Dependency-annotated variant of toIPTC: emits
     * "IPTC:&lt;number&gt;:&lt;top&gt;:&lt;mid&gt;:&lt;detail&gt;" with the
     * segments populated according to the entry's level (top-level numbers
     * end in "000000", mid-level in "000"). Trailing comments record
     * control/data dependencies.
     */
    public String toIPTC(SubjectReferenceSystem srs) {
        StringBuffer b = new StringBuffer();
        b.append("IPTC:");
        b.append(getNumber());
        b.append(":");
        if (getNumber().endsWith("000000")) {
            b.append(toIPTCHelper(srs.getName(this))); // depends on control dependency: [if], data = [none]
            b.append("::"); // depends on control dependency: [if], data = [none]
        } else if (getNumber().endsWith("000")) {
            b.append(toIPTCHelper(srs.getName(srs.get(getNumber().substring(0, 2) + "000000")))); // depends on control dependency: [if], data = [none]
            b.append(":"); // depends on control dependency: [if], data = [none]
            b.append(toIPTCHelper(srs.getName(this))); // depends on control dependency: [if], data = [none]
            b.append(":"); // depends on control dependency: [if], data = [none]
        } else {
            b.append(toIPTCHelper(srs.getName(srs.get(getNumber().substring(0, 2) + "000000")))); // depends on control dependency: [if], data = [none]
            b.append(":"); // depends on control dependency: [if], data = [none]
            b.append(toIPTCHelper(srs.getName(srs.get(getNumber().substring(0, 5) + "000")))); // depends on control dependency: [if], data = [none]
            b.append(":"); // depends on control dependency: [if], data = [none]
            b.append(toIPTCHelper(srs.getName(this))); // depends on control dependency: [if], data = [none]
        }
        return b.toString();
    }
}
public class class_name {
    /**
     * Interleaves two equal-length coordinate arrays into a single array of
     * (x, y) pairs: {x[0], y[0], x[1], y[1], ...}.
     *
     * Bug fix: the previous version wrote x[i] to ret[i] and y[i] to
     * ret[i + 1], so successive iterations overwrote each other and the upper
     * half of the result array was never filled.
     *
     * @param x the x coordinates
     * @param y the y coordinates, same length as {@code x}
     * @return an array of length {@code 2 * x.length} with the coordinates interleaved
     * @throws IllegalArgumentException if the arrays differ in length
     */
    public static double[] mergeCoords(double[] x, double[] y) {
        if (x.length != y.length)
            throw new IllegalArgumentException(
                            "Sample sizes must be the same for each data applyTransformToDestination.");
        double[] ret = new double[x.length + y.length];
        for (int i = 0; i < x.length; i++) {
            ret[2 * i] = x[i];
            ret[2 * i + 1] = y[i];
        }
        return ret;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant of mergeCoords: interleaves two
     * equal-length coordinate arrays into {x[0], y[0], x[1], y[1], ...}.
     *
     * Bug fix (mirrors the un-annotated copy): indices are now 2*i and
     * 2*i + 1; the previous i / i + 1 writes overwrote each other and left
     * the tail of the result zeroed.
     *
     * @throws IllegalArgumentException if the arrays differ in length
     */
    public static double[] mergeCoords(double[] x, double[] y) {
        if (x.length != y.length)
            throw new IllegalArgumentException(
                            "Sample sizes must be the same for each data applyTransformToDestination.");
        double[] ret = new double[x.length + y.length];
        for (int i = 0; i < x.length; i++) {
            ret[2 * i] = x[i]; // depends on control dependency: [for], data = [i]
            ret[2 * i + 1] = y[i]; // depends on control dependency: [for], data = [i]
        }
        return ret;
    }
}
public class class_name {
    /**
     * Returns the next result in the output sequence, pulling further input
     * from {@code source} through the state machine on demand.
     *
     * Protocol (as visible here): {@code fsm.apply(...)} may raise
     * {@code flushMode} and deposit results into {@code buffer}; one buffered
     * result is returned per call. When the source is exhausted,
     * {@code fsm.end()} is invoked so any trailing results get flushed.
     *
     * NOTE(review): a null return appears to mean end-of-sequence — confirm
     * callers treat it that way. Also, every call made after exhaustion
     * invokes fsm.end() again — confirm end() is idempotent.
     */
    public T nextInSequence() {
        T result = null;
        // Poll results from the buffer until no more are available, but only once the state machine has flushed
        // the buffer.
        if (flushMode) {
            result = buffer.poll();
            if (result != null) {
                return result;
            }
            // Buffer drained: leave flush mode and resume consuming input.
            flushMode = false;
        }
        // Feed input from the source into the state machine, until some results become available on the buffer.
        while (source.hasNext()) {
            S next = source.next();
            fsm.apply(next);
            if (flushMode) {
                result = buffer.poll();
                if (result != null) {
                    return result;
                }
                flushMode = false;
            }
        }
        // Once the end of the input source is reached, inform the state machine of this, and try and poll any
        // buffered results.
        fsm.end();
        if (flushMode) {
            result = buffer.poll();
        }
        return result;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant of nextInSequence: drains one buffered
     * result per call, feeding further input through the state machine until
     * the buffer is flushed, and signalling end-of-input via fsm.end().
     * Trailing comments record control/data dependencies.
     */
    public T nextInSequence() {
        T result = null;
        // Poll results from the buffer until no more are available, but only once the state machine has flushed
        // the buffer.
        if (flushMode) {
            result = buffer.poll(); // depends on control dependency: [if], data = [none]
            if (result != null) {
                return result; // depends on control dependency: [if], data = [none]
            }
            flushMode = false; // depends on control dependency: [if], data = [none]
        }
        // Feed input from the source into the state machine, until some results become available on the buffer.
        while (source.hasNext()) {
            S next = source.next();
            fsm.apply(next); // depends on control dependency: [while], data = [none]
            if (flushMode) {
                result = buffer.poll(); // depends on control dependency: [if], data = [none]
                if (result != null) {
                    return result; // depends on control dependency: [if], data = [none]
                }
                flushMode = false; // depends on control dependency: [if], data = [none]
            }
        }
        // Once the end of the input source is reached, inform the state machine of this, and try and poll any
        // buffered results.
        fsm.end();
        if (flushMode) {
            result = buffer.poll(); // depends on control dependency: [if], data = [none]
        }
        return result;
    }
}
public class class_name {
    /**
     * Builds "brown_&lt;p&gt;" -&gt; cluster-id-prefix features for the word:
     * one entry per prefix length in BROWN_PREFIX, each value a prefix of the
     * word's Brown-cluster id (clamped to the id's length). Words without a
     * cluster yield an empty map.
     */
    public static Map<String, String> extractBrownFeat(String word) {
        if (!BROWN_CLUSTER.containsKey(word)) {
            return new HashMap<>();
        }
        String clusterId = BROWN_CLUSTER.get(word);
        return Arrays.stream(BROWN_PREFIX)
                .mapToObj(prefixLen -> new AbstractMap.SimpleEntry<>(
                        "brown_" + prefixLen,
                        clusterId.substring(0, Math.min(prefixLen, clusterId.length()))))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: builds "brown_&lt;p&gt;" -&gt;
     * cluster-id-prefix features for the word, or an empty map when the word
     * has no Brown cluster. Trailing comments record control/data dependencies.
     */
    public static Map<String, String> extractBrownFeat(String word) {
        if (!BROWN_CLUSTER.containsKey(word)) {
            return new HashMap<>(); // depends on control dependency: [if], data = [none]
        }
        String clusterId = BROWN_CLUSTER.get(word);
        return Arrays.stream(BROWN_PREFIX).mapToObj(p -> {
            // Clamp the prefix length to the cluster id's actual length.
            int end = Math.min(p, clusterId.length());
            return new AbstractMap.SimpleEntry<>("brown_" + p, clusterId.substring(0, end));
        }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }
}
public class class_name {
    /**
     * Records the toggled state for the weekday button that fired this event,
     * then refreshes the dialog. Only the first matching button is honored,
     * matching the original guarded scan.
     */
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        for (int day = 0; day < 7; day++) {
            if (buttonView == mWeekByDayButtons[day]) {
                mModel.weeklyByDayOfWeek[day] = isChecked;
                break; // first match only, as before
            }
        }
        updateDialog();
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: records the toggled state for the first
     * weekday button matching the event source (the itemIdx guard stops
     * further matches), then refreshes the dialog.
     */
    @Override
    public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
        int itemIdx = -1;
        for (int i = 0; i < 7; i++) {
            if (itemIdx == -1 && buttonView == mWeekByDayButtons[i]) {
                itemIdx = i; // depends on control dependency: [if], data = [none]
                mModel.weeklyByDayOfWeek[i] = isChecked; // depends on control dependency: [if], data = [none]
            }
        }
        updateDialog();
    }
}
public class class_name { public static void postThreadSetting(StringEntity input) { String pageToken = FbBotMillContext.getInstance().getPageToken(); // If the page token is invalid, returns. if (!validatePageToken(pageToken)) { return; } String url = FbBotMillNetworkConstants.FACEBOOK_BASE_URL + FbBotMillNetworkConstants.FACEBOOK_THREAD_SETTINGS_URL + pageToken; postInternal(url, input); } }
public class class_name {
    /**
     * Dependency-annotated variant: POSTs a thread-settings payload to the
     * Facebook endpoint for the configured page, bailing out when the page
     * token fails validation.
     */
    public static void postThreadSetting(StringEntity input) {
        String pageToken = FbBotMillContext.getInstance().getPageToken();
        // If the page token is invalid, returns.
        if (!validatePageToken(pageToken)) {
            return; // depends on control dependency: [if], data = [none]
        }
        String url = FbBotMillNetworkConstants.FACEBOOK_BASE_URL + FbBotMillNetworkConstants.FACEBOOK_THREAD_SETTINGS_URL + pageToken;
        postInternal(url, input);
    }
}
public class class_name {
    /**
     * Runs an RPC through the filter chain: each filter may rewrite the
     * request, short-circuit the call with its own response, and post-process
     * the response in reverse registration order.
     */
    public RpcResponse call(RpcRequest req) {
        // Give every filter a chance to rewrite the outgoing request.
        for (Filter filter : filters) {
            RpcRequest altered = filter.alterRequest(req);
            req = (altered != null) ? altered : req;
        }
        // First filter that yields a response short-circuits the actual call.
        RpcResponse resp = null;
        for (Filter filter : filters) {
            resp = filter.preInvoke(req);
            if (resp != null) {
                break;
            }
        }
        if (resp == null) {
            resp = callInternal(req);
        }
        // Post-process in reverse order; filters may replace the response.
        for (int idx = filters.size() - 1; idx >= 0; idx--) {
            RpcResponse replaced = filters.get(idx).postInvoke(req, resp);
            if (replaced != null) {
                resp = replaced;
            }
        }
        return resp;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant of call: filters may rewrite the request,
     * short-circuit with a response via preInvoke, and post-process the
     * response in reverse order. Trailing comments record dependencies.
     */
    public RpcResponse call(RpcRequest req) {
        for (Filter filter : filters) {
            RpcRequest tmp = filter.alterRequest(req);
            if (tmp != null) {
                req = tmp; // depends on control dependency: [if], data = [none]
            }
        }
        RpcResponse resp = null;
        for (Filter filter : filters) {
            resp = filter.preInvoke(req); // depends on control dependency: [for], data = [filter]
            if (resp != null) {
                break;
            }
        }
        if (resp == null) {
            resp = callInternal(req); // depends on control dependency: [if], data = [none]
        }
        for (int i = filters.size() - 1; i >= 0; i--) {
            RpcResponse tmp = filters.get(i).postInvoke(req, resp);
            if (tmp != null) {
                resp = tmp; // depends on control dependency: [if], data = [none]
            }
        }
        return resp;
    }
}
public class class_name {
    /**
     * Locates and loads a license, probing in order: the working directory,
     * the directory containing the VoltDB jar, then the user's home
     * directory. Returns null when no probe yields a license.
     *
     * NOTE(review): the URISyntaxException is deliberately swallowed — a
     * malformed jar location simply skips that probe and the remaining
     * fallbacks still run.
     */
    public static LicenseApi licenseApiFactory() {
        String licensePath = System.getProperty("user.dir") + "/" + licenseFileName;
        LicenseApi licenseApi = MiscUtils.licenseApiFactory(licensePath);
        if (licenseApi == null) {
            try {
                // Get location of jar file
                String jarLoc = VoltDB.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
                // Strip of file name
                int lastSlashOff = jarLoc.lastIndexOf("/");
                if (lastSlashOff == -1) {
                    // Jar is at root directory
                    licensePath = "/" + licenseFileName;
                } else {
                    licensePath = jarLoc.substring(0, lastSlashOff+1) + licenseFileName;
                }
                licenseApi = MiscUtils.licenseApiFactory(licensePath);
            } catch (URISyntaxException e) {
            }
        }
        if (licenseApi == null) {
            licensePath = System.getProperty("user.home") + "/" + licenseFileName;
            licenseApi = MiscUtils.licenseApiFactory(licensePath);
        }
        if (licenseApi != null) {
            // licensePath here is whichever probe ultimately succeeded.
            hostLog.info("Searching for license file located " + licensePath);
        }
        return licenseApi;
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: probes the working directory, the jar's
     * directory, then the user's home directory for a license file; a bad
     * jar URI simply skips the second probe. Trailing comments record
     * control/data dependencies.
     */
    public static LicenseApi licenseApiFactory() {
        String licensePath = System.getProperty("user.dir") + "/" + licenseFileName;
        LicenseApi licenseApi = MiscUtils.licenseApiFactory(licensePath);
        if (licenseApi == null) {
            try {
                // Get location of jar file
                String jarLoc = VoltDB.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath();
                // Strip of file name
                int lastSlashOff = jarLoc.lastIndexOf("/");
                if (lastSlashOff == -1) {
                    // Jar is at root directory
                    licensePath = "/" + licenseFileName; // depends on control dependency: [if], data = [none]
                } else {
                    licensePath = jarLoc.substring(0, lastSlashOff+1) + licenseFileName; // depends on control dependency: [if], data = [none]
                }
                licenseApi = MiscUtils.licenseApiFactory(licensePath); // depends on control dependency: [try], data = [none]
            } catch (URISyntaxException e) {
            } // depends on control dependency: [catch], data = [none]
        }
        if (licenseApi == null) {
            licensePath = System.getProperty("user.home") + "/" + licenseFileName;
            licenseApi = MiscUtils.licenseApiFactory(licensePath); // depends on control dependency: [if], data = [none]
        }
        if (licenseApi != null) {
            hostLog.info("Searching for license file located " + licensePath); // depends on control dependency: [if], data = [none]
        }
        return licenseApi;
    }
}
public class class_name { public final void waitForFullService() { if (!m_state.isInitialized()) { throw new RuntimeException("Service has not been initialized"); } synchronized (m_stateChangeLock) { // Loop until state >= RUNNING while (!m_state.isRunning()) { try { m_stateChangeLock.wait(); } catch (InterruptedException e) { } } if (m_state.isStopping()) { throw new RuntimeException("Service " + this.getClass().getSimpleName() + " failed before reaching running state"); } } } }
public class class_name {
    /**
     * Dependency-annotated variant: blocks until the service is running,
     * then fails fast if it is already stopping.
     *
     * NOTE(review): InterruptedException is swallowed without restoring the
     * interrupt status, so callers cannot observe interruption — verify this
     * is intentional.
     */
    public final void waitForFullService() {
        if (!m_state.isInitialized()) {
            throw new RuntimeException("Service has not been initialized");
        }
        synchronized (m_stateChangeLock) {
            // Loop until state >= RUNNING
            while (!m_state.isRunning()) {
                try {
                    m_stateChangeLock.wait(); // depends on control dependency: [try], data = [none]
                } catch (InterruptedException e) {
                } // depends on control dependency: [catch], data = [none]
            }
            if (m_state.isStopping()) {
                throw new RuntimeException("Service " + this.getClass().getSimpleName() + " failed before reaching running state");
            }
        }
    }
}
public class class_name {
    /**
     * Queries every configured NIC and collects the resulting event records.
     * Unresolvable hosts are reported to stderr and skipped.
     */
    public EventRecord[] monitor() {
        final ArrayList<EventRecord> records = new ArrayList<EventRecord>();
        for (String nic : nics) {
            try {
                records.add(query(nic));
            } catch (UnknownHostException e) {
                // Best-effort: report and continue with the remaining NICs.
                e.printStackTrace();
            }
        }
        return records.toArray(new EventRecord[records.size()]);
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: queries each configured NIC, collecting
     * the event records; unresolvable hosts are printed and skipped.
     */
    public EventRecord[] monitor() {
        ArrayList<EventRecord> recs = new ArrayList<EventRecord>();
        for (String nic : nics) {
            try {
                recs.add(query(nic)); // depends on control dependency: [try], data = [none]
            } catch (UnknownHostException e) {
                e.printStackTrace();
            } // depends on control dependency: [catch], data = [none]
        }
        EventRecord[] T = new EventRecord[recs.size()];
        return recs.toArray(T);
    }
}
public class class_name {
    /**
     * Marshals the given HistoricalMetric into the protocol stream, emitting
     * the fields in declaration order: Name, Threshold, Statistic, Unit.
     *
     * @throws SdkClientException if the metric is null, or wrapping any
     *         failure raised during marshalling (original cause preserved)
     */
    public void marshall(HistoricalMetric historicalMetric, ProtocolMarshaller protocolMarshaller) {
        if (historicalMetric == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(historicalMetric.getName(), NAME_BINDING);
            protocolMarshaller.marshall(historicalMetric.getThreshold(), THRESHOLD_BINDING);
            protocolMarshaller.marshall(historicalMetric.getStatistic(), STATISTIC_BINDING);
            protocolMarshaller.marshall(historicalMetric.getUnit(), UNIT_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class class_name {
    /**
     * Dependency-annotated variant: marshals the HistoricalMetric's fields
     * (Name, Threshold, Statistic, Unit) into the protocol stream, wrapping
     * any failure in an SdkClientException.
     */
    public void marshall(HistoricalMetric historicalMetric, ProtocolMarshaller protocolMarshaller) {
        if (historicalMetric == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(historicalMetric.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(historicalMetric.getThreshold(), THRESHOLD_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(historicalMetric.getStatistic(), STATISTIC_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(historicalMetric.getUnit(), UNIT_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    }
}