code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { private void addNewTracks() { trackerLeft.spawnTracks(); trackerRight.spawnTracks(); List<PointTrack> newLeft = trackerLeft.getNewTracks(null); List<PointTrack> newRight = trackerRight.getNewTracks(null); // get a list of new tracks and their descriptions addNewToList(inputLeft, newLeft, pointsLeft, descLeft); addNewToList(inputRight,newRight,pointsRight,descRight); // associate using L2R assocL2R.setSource(pointsLeft,descLeft); assocL2R.setDestination(pointsRight, descRight); assocL2R.associate(); FastQueue<AssociatedIndex> matches = assocL2R.getMatches(); // storage for the triangulated location in the camera frame Point3D_F64 cameraP3 = new Point3D_F64(); for( int i = 0; i < matches.size; i++ ) { AssociatedIndex m = matches.get(i); PointTrack trackL = newLeft.get(m.src); PointTrack trackR = newRight.get(m.dst); // declare additional track information stored in each track. Tracks can be recycled so it // might not always need to be declared LeftTrackInfo infoLeft = trackL.getCookie(); if( infoLeft == null ) trackL.cookie = infoLeft = new LeftTrackInfo(); RightTrackInfo infoRight = trackR.getCookie(); if( infoRight == null ) trackR.cookie = infoRight = new RightTrackInfo(); Stereo2D3D p2d3d = infoLeft.location; // convert pixel observations into normalized image coordinates leftImageToNorm.compute(trackL.x,trackL.y,p2d3d.leftObs); rightImageToNorm.compute(trackR.x,trackR.y,p2d3d.rightObs); // triangulate 3D coordinate in the current camera frame if( triangulate.triangulate(p2d3d.leftObs,p2d3d.rightObs,leftToRight,cameraP3) ) { // put the track into the current keyframe coordinate system SePointOps_F64.transform(currToKey,cameraP3,p2d3d.location); // save a reference to the matching track in the right camera frame infoLeft.right = trackR; infoLeft.lastConsistent = infoLeft.lastInlier = tick; infoRight.left = trackL; } else { // triangulation failed, drop track trackerLeft.dropTrack(trackL); // TODO need way to mark right tracks which are 
unassociated after this loop throw new RuntimeException("This special case needs to be handled!"); } } // drop tracks that were not associated GrowQueue_I32 unassignedRight = assocL2R.getUnassociatedDestination(); for( int i = 0; i < unassignedRight.size; i++ ) { int index = unassignedRight.get(i); // System.out.println(" unassigned right "+newRight.get(index).x+" "+newRight.get(index).y); trackerRight.dropTrack(newRight.get(index)); } GrowQueue_I32 unassignedLeft = assocL2R.getUnassociatedSource(); for( int i = 0; i < unassignedLeft.size; i++ ) { int index = unassignedLeft.get(i); trackerLeft.dropTrack(newLeft.get(index)); } // System.out.println("Total left "+trackerLeft.getAllTracks(null).size()+" right "+trackerRight.getAllTracks(null).size()); // System.out.println("Associated: "+matches.size+" new left "+newLeft.size()+" new right "+newRight.size()); // System.out.println("New Tracks: Total: Left "+trackerLeft.getAllTracks(null).size()+" right "+ // trackerRight.getAllTracks(null).size()); // List<PointTrack> temp = trackerLeft.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } // temp = trackerRight.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } } }
public class class_name { private void addNewTracks() { trackerLeft.spawnTracks(); trackerRight.spawnTracks(); List<PointTrack> newLeft = trackerLeft.getNewTracks(null); List<PointTrack> newRight = trackerRight.getNewTracks(null); // get a list of new tracks and their descriptions addNewToList(inputLeft, newLeft, pointsLeft, descLeft); addNewToList(inputRight,newRight,pointsRight,descRight); // associate using L2R assocL2R.setSource(pointsLeft,descLeft); assocL2R.setDestination(pointsRight, descRight); assocL2R.associate(); FastQueue<AssociatedIndex> matches = assocL2R.getMatches(); // storage for the triangulated location in the camera frame Point3D_F64 cameraP3 = new Point3D_F64(); for( int i = 0; i < matches.size; i++ ) { AssociatedIndex m = matches.get(i); PointTrack trackL = newLeft.get(m.src); PointTrack trackR = newRight.get(m.dst); // declare additional track information stored in each track. Tracks can be recycled so it // might not always need to be declared LeftTrackInfo infoLeft = trackL.getCookie(); if( infoLeft == null ) trackL.cookie = infoLeft = new LeftTrackInfo(); RightTrackInfo infoRight = trackR.getCookie(); if( infoRight == null ) trackR.cookie = infoRight = new RightTrackInfo(); Stereo2D3D p2d3d = infoLeft.location; // convert pixel observations into normalized image coordinates leftImageToNorm.compute(trackL.x,trackL.y,p2d3d.leftObs); // depends on control dependency: [for], data = [none] rightImageToNorm.compute(trackR.x,trackR.y,p2d3d.rightObs); // depends on control dependency: [for], data = [none] // triangulate 3D coordinate in the current camera frame if( triangulate.triangulate(p2d3d.leftObs,p2d3d.rightObs,leftToRight,cameraP3) ) { // put the track into the current keyframe coordinate system SePointOps_F64.transform(currToKey,cameraP3,p2d3d.location); // depends on control dependency: [if], data = [none] // save a reference to the matching track in the right camera frame infoLeft.right = trackR; // depends on control dependency: [if], 
data = [none] infoLeft.lastConsistent = infoLeft.lastInlier = tick; // depends on control dependency: [if], data = [none] infoRight.left = trackL; // depends on control dependency: [if], data = [none] } else { // triangulation failed, drop track trackerLeft.dropTrack(trackL); // depends on control dependency: [if], data = [none] // TODO need way to mark right tracks which are unassociated after this loop throw new RuntimeException("This special case needs to be handled!"); } } // drop tracks that were not associated GrowQueue_I32 unassignedRight = assocL2R.getUnassociatedDestination(); for( int i = 0; i < unassignedRight.size; i++ ) { int index = unassignedRight.get(i); // System.out.println(" unassigned right "+newRight.get(index).x+" "+newRight.get(index).y); trackerRight.dropTrack(newRight.get(index)); // depends on control dependency: [for], data = [none] } GrowQueue_I32 unassignedLeft = assocL2R.getUnassociatedSource(); for( int i = 0; i < unassignedLeft.size; i++ ) { int index = unassignedLeft.get(i); trackerLeft.dropTrack(newLeft.get(index)); // depends on control dependency: [for], data = [none] } // System.out.println("Total left "+trackerLeft.getAllTracks(null).size()+" right "+trackerRight.getAllTracks(null).size()); // System.out.println("Associated: "+matches.size+" new left "+newLeft.size()+" new right "+newRight.size()); // System.out.println("New Tracks: Total: Left "+trackerLeft.getAllTracks(null).size()+" right "+ // trackerRight.getAllTracks(null).size()); // List<PointTrack> temp = trackerLeft.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } // temp = trackerRight.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } } }
public class class_name { static BsonDocument getDocumentVersionDoc(final BsonDocument document) { if (document == null || !document.containsKey(DOCUMENT_VERSION_FIELD)) { return null; } return document.getDocument(DOCUMENT_VERSION_FIELD, null); } }
public class class_name { static BsonDocument getDocumentVersionDoc(final BsonDocument document) { if (document == null || !document.containsKey(DOCUMENT_VERSION_FIELD)) { return null; // depends on control dependency: [if], data = [none] } return document.getDocument(DOCUMENT_VERSION_FIELD, null); } }
public class class_name { public boolean isInitialized(Object entity) { if (nullable) { return true; } Object variable = getValue(entity); return variable != null; } }
public class class_name { public boolean isInitialized(Object entity) { if (nullable) { return true; // depends on control dependency: [if], data = [none] } Object variable = getValue(entity); return variable != null; } }
public class class_name { public void setNodeGroups(java.util.Collection<NodeGroup> nodeGroups) { if (nodeGroups == null) { this.nodeGroups = null; return; } this.nodeGroups = new com.amazonaws.internal.SdkInternalList<NodeGroup>(nodeGroups); } }
public class class_name { public void setNodeGroups(java.util.Collection<NodeGroup> nodeGroups) { if (nodeGroups == null) { this.nodeGroups = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.nodeGroups = new com.amazonaws.internal.SdkInternalList<NodeGroup>(nodeGroups); } }
public class class_name { public String replaceFrom(CharSequence sequence, CharSequence replacement, CountMethod countMethod, SpanCondition spanCondition) { SpanCondition copySpan = spanCondition == SpanCondition.NOT_CONTAINED ? SpanCondition.SIMPLE : SpanCondition.NOT_CONTAINED; final boolean remove = replacement.length() == 0; StringBuilder result = new StringBuilder(); // TODO, we can optimize this to // avoid this allocation unless needed final int length = sequence.length(); OutputInt spanCount = null; for (int endCopy = 0; endCopy != length;) { int endModify; if (countMethod == CountMethod.WHOLE_SPAN) { endModify = unicodeSet.span(sequence, endCopy, spanCondition); } else { if (spanCount == null) { spanCount = new OutputInt(); } endModify = unicodeSet.spanAndCount(sequence, endCopy, spanCondition, spanCount); } if (remove || endModify == 0) { // do nothing } else if (countMethod == CountMethod.WHOLE_SPAN) { result.append(replacement); } else { for (int i = spanCount.value; i > 0; --i) { result.append(replacement); } } if (endModify == length) { break; } endCopy = unicodeSet.span(sequence, endModify, copySpan); result.append(sequence.subSequence(endModify, endCopy)); } return result.toString(); } }
public class class_name { public String replaceFrom(CharSequence sequence, CharSequence replacement, CountMethod countMethod, SpanCondition spanCondition) { SpanCondition copySpan = spanCondition == SpanCondition.NOT_CONTAINED ? SpanCondition.SIMPLE : SpanCondition.NOT_CONTAINED; final boolean remove = replacement.length() == 0; StringBuilder result = new StringBuilder(); // TODO, we can optimize this to // avoid this allocation unless needed final int length = sequence.length(); OutputInt spanCount = null; for (int endCopy = 0; endCopy != length;) { int endModify; if (countMethod == CountMethod.WHOLE_SPAN) { endModify = unicodeSet.span(sequence, endCopy, spanCondition); // depends on control dependency: [if], data = [none] } else { if (spanCount == null) { spanCount = new OutputInt(); // depends on control dependency: [if], data = [none] } endModify = unicodeSet.spanAndCount(sequence, endCopy, spanCondition, spanCount); // depends on control dependency: [if], data = [none] } if (remove || endModify == 0) { // do nothing } else if (countMethod == CountMethod.WHOLE_SPAN) { result.append(replacement); // depends on control dependency: [if], data = [none] } else { for (int i = spanCount.value; i > 0; --i) { result.append(replacement); // depends on control dependency: [for], data = [none] } } if (endModify == length) { break; } endCopy = unicodeSet.span(sequence, endModify, copySpan); // depends on control dependency: [for], data = [endCopy] result.append(sequence.subSequence(endModify, endCopy)); // depends on control dependency: [for], data = [endCopy] } return result.toString(); } }
public class class_name { protected void send(JSONObject json, ResponseListener listener) { String contentType = headers.get(CONTENT_TYPE); if (contentType == null) { contentType = JSON_CONTENT_TYPE; } RequestBody body = RequestBody.create(MediaType.parse(contentType), json.toString()); sendRequest(null, listener, body); } }
public class class_name { protected void send(JSONObject json, ResponseListener listener) { String contentType = headers.get(CONTENT_TYPE); if (contentType == null) { contentType = JSON_CONTENT_TYPE; // depends on control dependency: [if], data = [none] } RequestBody body = RequestBody.create(MediaType.parse(contentType), json.toString()); sendRequest(null, listener, body); } }
public class class_name { public static Value.Builder makeValue(Value.Builder value1, Value.Builder value2, Value.Builder... rest) { ArrayValue.Builder arrayValue = ArrayValue.newBuilder(); arrayValue.addValues(value1); arrayValue.addValues(value2); for (Value.Builder builder : rest) { arrayValue.addValues(builder); } return Value.newBuilder().setArrayValue(arrayValue); } }
public class class_name { public static Value.Builder makeValue(Value.Builder value1, Value.Builder value2, Value.Builder... rest) { ArrayValue.Builder arrayValue = ArrayValue.newBuilder(); arrayValue.addValues(value1); arrayValue.addValues(value2); for (Value.Builder builder : rest) { arrayValue.addValues(builder); // depends on control dependency: [for], data = [builder] } return Value.newBuilder().setArrayValue(arrayValue); } }
public class class_name { @Override public Iterator<I> iterator() { List<I> i = this.intervals; if (i == null) { Collector collector = new Collector(); this.accept(collector); i = Collections.unmodifiableList(collector.visited); this.intervals = i; } return i.iterator(); } }
public class class_name { @Override public Iterator<I> iterator() { List<I> i = this.intervals; if (i == null) { Collector collector = new Collector(); this.accept(collector); // depends on control dependency: [if], data = [none] i = Collections.unmodifiableList(collector.visited); // depends on control dependency: [if], data = [none] this.intervals = i; // depends on control dependency: [if], data = [none] } return i.iterator(); } }
public class class_name { public JSONObject similarDeleteByUrl(String url, HashMap<String, String> options) { AipRequest request = new AipRequest(); preOperation(request); request.addBody("url", url); if (options != null) { request.addBody(options); } request.setUri(ImageSearchConsts.SIMILAR_DELETE); postOperation(request); return requestServer(request); } }
public class class_name { public JSONObject similarDeleteByUrl(String url, HashMap<String, String> options) { AipRequest request = new AipRequest(); preOperation(request); request.addBody("url", url); if (options != null) { request.addBody(options); // depends on control dependency: [if], data = [(options] } request.setUri(ImageSearchConsts.SIMILAR_DELETE); postOperation(request); return requestServer(request); } }
public class class_name { boolean isConsiderateMethod(Collection<String> methodProceeds, ExecutableElement methodElement) { // int argNum methodElement.getParameters().size(); String signature = methodElement.getSimpleName().toString(); // + "(" + argNum + ")"; // Check if method ith same signature has been already proceed. if (methodProceeds.contains(signature)) { return false; } // Herited from Object TypeElement objectElement = environment.getElementUtils().getTypeElement(Object.class.getName()); if (objectElement.getEnclosedElements().contains(methodElement)) { return false; } // Static, not public ? if (!methodElement.getModifiers().contains(Modifier.PUBLIC) || methodElement.getModifiers().contains(Modifier.STATIC)) { return false; } // TransientDataService ? List<? extends AnnotationMirror> annotationMirrors = methodElement.getAnnotationMirrors(); for (AnnotationMirror annotationMirror : annotationMirrors) { if (annotationMirror.getAnnotationType().toString().equals(TransientDataService.class.getName())) { return false; } } methodProceeds.add(signature); return true; } }
public class class_name { boolean isConsiderateMethod(Collection<String> methodProceeds, ExecutableElement methodElement) { // int argNum methodElement.getParameters().size(); String signature = methodElement.getSimpleName().toString(); // + "(" + argNum + ")"; // Check if method ith same signature has been already proceed. if (methodProceeds.contains(signature)) { return false; // depends on control dependency: [if], data = [none] } // Herited from Object TypeElement objectElement = environment.getElementUtils().getTypeElement(Object.class.getName()); if (objectElement.getEnclosedElements().contains(methodElement)) { return false; // depends on control dependency: [if], data = [none] } // Static, not public ? if (!methodElement.getModifiers().contains(Modifier.PUBLIC) || methodElement.getModifiers().contains(Modifier.STATIC)) { return false; // depends on control dependency: [if], data = [none] } // TransientDataService ? List<? extends AnnotationMirror> annotationMirrors = methodElement.getAnnotationMirrors(); for (AnnotationMirror annotationMirror : annotationMirrors) { if (annotationMirror.getAnnotationType().toString().equals(TransientDataService.class.getName())) { return false; // depends on control dependency: [if], data = [none] } } methodProceeds.add(signature); return true; } }
public class class_name { public ManualGrpcSecurityMetadataSource remove(final ServiceDescriptor service) { requireNonNull(service, "service"); for (final MethodDescriptor<?, ?> method : service.getMethods()) { this.accessMap.remove(method); } return this; } }
public class class_name { public ManualGrpcSecurityMetadataSource remove(final ServiceDescriptor service) { requireNonNull(service, "service"); for (final MethodDescriptor<?, ?> method : service.getMethods()) { this.accessMap.remove(method); // depends on control dependency: [for], data = [method] } return this; } }
public class class_name { void killAll() { for (RjiRegistration reg : this.instancesById.values().toArray(new RjiRegistration[] {})) { reg.rji.handleInstruction(Instruction.KILL); } } }
public class class_name { void killAll() { for (RjiRegistration reg : this.instancesById.values().toArray(new RjiRegistration[] {})) { reg.rji.handleInstruction(Instruction.KILL); // depends on control dependency: [for], data = [reg] } } }
public class class_name { private void rotateLeft(Node<K, V> root) { Node<K, V> left = root.left; Node<K, V> pivot = root.right; Node<K, V> pivotLeft = pivot.left; Node<K, V> pivotRight = pivot.right; // move the pivot's left child to the root's right root.right = pivotLeft; if (pivotLeft != null) { pivotLeft.parent = root; } replaceInParent(root, pivot); // move the root to the pivot's left pivot.left = root; root.parent = pivot; // fix heights root.height = Math.max(left != null ? left.height : 0, pivotLeft != null ? pivotLeft.height : 0) + 1; pivot.height = Math.max(root.height, pivotRight != null ? pivotRight.height : 0) + 1; } }
public class class_name { private void rotateLeft(Node<K, V> root) { Node<K, V> left = root.left; Node<K, V> pivot = root.right; Node<K, V> pivotLeft = pivot.left; Node<K, V> pivotRight = pivot.right; // move the pivot's left child to the root's right root.right = pivotLeft; if (pivotLeft != null) { pivotLeft.parent = root; // depends on control dependency: [if], data = [none] } replaceInParent(root, pivot); // move the root to the pivot's left pivot.left = root; root.parent = pivot; // fix heights root.height = Math.max(left != null ? left.height : 0, pivotLeft != null ? pivotLeft.height : 0) + 1; pivot.height = Math.max(root.height, pivotRight != null ? pivotRight.height : 0) + 1; } }
public class class_name { public static Set<PhysicalEntity> getRelatedPhysicalEntities(BioPAXElement element, Set<PhysicalEntity> pes) { if (pes == null) pes = new HashSet<PhysicalEntity>(); if (element instanceof PhysicalEntity) { PhysicalEntity pe = (PhysicalEntity) element; if (!pes.contains(pe)) { pes.add(pe); for (Complex cmp : pe.getComponentOf()) { getRelatedPhysicalEntities(cmp, pes); } // This is a hack for BioPAX graph. Equivalence relations do not link members and // complexes because members cannot be addressed. Below call makes sure that if the // source node has a generic parents or children and they appear in a complex, we // include the complex in the sources. addEquivalentsComplexes(pe, pes); } } else if (element instanceof Xref) { for (XReferrable xrable : ((Xref) element).getXrefOf()) { getRelatedPhysicalEntities(xrable, pes); } } else if (element instanceof EntityReference) { EntityReference er = (EntityReference) element; for (SimplePhysicalEntity spe : er.getEntityReferenceOf()) { getRelatedPhysicalEntities(spe, pes); } for (EntityReference parentER : er.getMemberEntityReferenceOf()) { getRelatedPhysicalEntities(parentER, pes); } } return pes; } }
public class class_name { public static Set<PhysicalEntity> getRelatedPhysicalEntities(BioPAXElement element, Set<PhysicalEntity> pes) { if (pes == null) pes = new HashSet<PhysicalEntity>(); if (element instanceof PhysicalEntity) { PhysicalEntity pe = (PhysicalEntity) element; if (!pes.contains(pe)) { pes.add(pe); // depends on control dependency: [if], data = [none] for (Complex cmp : pe.getComponentOf()) { getRelatedPhysicalEntities(cmp, pes); // depends on control dependency: [for], data = [cmp] } // This is a hack for BioPAX graph. Equivalence relations do not link members and // complexes because members cannot be addressed. Below call makes sure that if the // source node has a generic parents or children and they appear in a complex, we // include the complex in the sources. addEquivalentsComplexes(pe, pes); // depends on control dependency: [if], data = [none] } } else if (element instanceof Xref) { for (XReferrable xrable : ((Xref) element).getXrefOf()) { getRelatedPhysicalEntities(xrable, pes); // depends on control dependency: [for], data = [xrable] } } else if (element instanceof EntityReference) { EntityReference er = (EntityReference) element; for (SimplePhysicalEntity spe : er.getEntityReferenceOf()) { getRelatedPhysicalEntities(spe, pes); // depends on control dependency: [for], data = [spe] } for (EntityReference parentER : er.getMemberEntityReferenceOf()) { getRelatedPhysicalEntities(parentER, pes); // depends on control dependency: [for], data = [parentER] } } return pes; } }
public class class_name { public void marshall(ListPrincipalsForPortfolioRequest listPrincipalsForPortfolioRequest, ProtocolMarshaller protocolMarshaller) { if (listPrincipalsForPortfolioRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING); protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getPortfolioId(), PORTFOLIOID_BINDING); protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getPageSize(), PAGESIZE_BINDING); protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getPageToken(), PAGETOKEN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ListPrincipalsForPortfolioRequest listPrincipalsForPortfolioRequest, ProtocolMarshaller protocolMarshaller) { if (listPrincipalsForPortfolioRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getAcceptLanguage(), ACCEPTLANGUAGE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getPortfolioId(), PORTFOLIOID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getPageSize(), PAGESIZE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listPrincipalsForPortfolioRequest.getPageToken(), PAGETOKEN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { static Map<String, Object> merge(Map<String, Object>... args) { if (args.length > 1) { Map<String, Object> obj = args[0]; for (int i = 1; i < args.length; i++) { obj = mergeTwo(obj, args[i]); } return obj; } else if (args.length == 1) { return args[0]; } else { return null; } } }
public class class_name { static Map<String, Object> merge(Map<String, Object>... args) { if (args.length > 1) { Map<String, Object> obj = args[0]; for (int i = 1; i < args.length; i++) { obj = mergeTwo(obj, args[i]); // depends on control dependency: [for], data = [i] } return obj; // depends on control dependency: [if], data = [none] } else if (args.length == 1) { return args[0]; // depends on control dependency: [if], data = [none] } else { return null; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Path("/{roleName}/users/{cuid}") @ApiOperation(value="Delete a role or remove a user from a role", notes="If users/{cuid} is present, user is removed from role.", response=StatusMessage.class) public JSONObject delete(String path, JSONObject content, Map<String,String> headers) throws ServiceException, JSONException { String name = getSegment(path, 1); String rel = getSegment(path, 2); UserServices userServices = ServiceLocator.getUserServices(); try { if (rel == null) { userServices.deleteRole(name); } else if (rel.equals("users")) { String cuid = getSegment(path, 3); userServices.removeUserFromRole(cuid, name); } } catch (DataAccessException ex) { throw new ServiceException(HTTP_500_INTERNAL_ERROR, ex.getMessage(), ex); } return null; } }
public class class_name { @Path("/{roleName}/users/{cuid}") @ApiOperation(value="Delete a role or remove a user from a role", notes="If users/{cuid} is present, user is removed from role.", response=StatusMessage.class) public JSONObject delete(String path, JSONObject content, Map<String,String> headers) throws ServiceException, JSONException { String name = getSegment(path, 1); String rel = getSegment(path, 2); UserServices userServices = ServiceLocator.getUserServices(); try { if (rel == null) { userServices.deleteRole(name); // depends on control dependency: [if], data = [none] } else if (rel.equals("users")) { String cuid = getSegment(path, 3); userServices.removeUserFromRole(cuid, name); // depends on control dependency: [if], data = [none] } } catch (DataAccessException ex) { throw new ServiceException(HTTP_500_INTERNAL_ERROR, ex.getMessage(), ex); } return null; } }
public class class_name { public static Map<Long, InProgressTx> txnBackwardsCompatCheck(int defaultLongTimeout, long longTimeoutTolerance, Map<Long, InProgressTx> inProgress) { for (Map.Entry<Long, InProgressTx> entry : inProgress.entrySet()) { long writePointer = entry.getKey(); long expiration = entry.getValue().getExpiration(); // LONG transactions will either have a negative expiration or expiration set to the long timeout // use a fudge factor on the expiration check, since expiraton is set based on system time, not the write pointer if (entry.getValue().getType() == null && (expiration < 0 || (getTxExpirationFromWritePointer(writePointer, defaultLongTimeout) - expiration < longTimeoutTolerance))) { // handle null expiration long newExpiration = getTxExpirationFromWritePointer(writePointer, defaultLongTimeout); InProgressTx compatTx = new InProgressTx(entry.getValue().getVisibilityUpperBound(), newExpiration, TransactionType.LONG, entry.getValue().getCheckpointWritePointers()); entry.setValue(compatTx); } else if (entry.getValue().getType() == null) { InProgressTx compatTx = new InProgressTx(entry.getValue().getVisibilityUpperBound(), entry.getValue().getExpiration(), TransactionType.SHORT, entry.getValue().getCheckpointWritePointers()); entry.setValue(compatTx); } } return inProgress; } }
public class class_name { public static Map<Long, InProgressTx> txnBackwardsCompatCheck(int defaultLongTimeout, long longTimeoutTolerance, Map<Long, InProgressTx> inProgress) { for (Map.Entry<Long, InProgressTx> entry : inProgress.entrySet()) { long writePointer = entry.getKey(); long expiration = entry.getValue().getExpiration(); // LONG transactions will either have a negative expiration or expiration set to the long timeout // use a fudge factor on the expiration check, since expiraton is set based on system time, not the write pointer if (entry.getValue().getType() == null && (expiration < 0 || (getTxExpirationFromWritePointer(writePointer, defaultLongTimeout) - expiration < longTimeoutTolerance))) { // handle null expiration long newExpiration = getTxExpirationFromWritePointer(writePointer, defaultLongTimeout); InProgressTx compatTx = new InProgressTx(entry.getValue().getVisibilityUpperBound(), newExpiration, TransactionType.LONG, entry.getValue().getCheckpointWritePointers()); entry.setValue(compatTx); // depends on control dependency: [if], data = [none] } else if (entry.getValue().getType() == null) { InProgressTx compatTx = new InProgressTx(entry.getValue().getVisibilityUpperBound(), entry.getValue().getExpiration(), TransactionType.SHORT, entry.getValue().getCheckpointWritePointers()); entry.setValue(compatTx); // depends on control dependency: [if], data = [none] } } return inProgress; } }
public class class_name { private <T extends IEntity> String getEntityName(T entity) { if (entity != null) { return entity.getClass().getSimpleName().toLowerCase(); } return null; } }
public class class_name { private <T extends IEntity> String getEntityName(T entity) { if (entity != null) { return entity.getClass().getSimpleName().toLowerCase(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public static final byte[] crypt(byte salt[], byte original[]) { byte result[] = new byte[13]; byte byteZero = salt[0]; byte byteOne = salt[1]; result[0] = byteZero; result[1] = byteOne; int Eswap0 = con_salt[byteZero]; int Eswap1 = con_salt[byteOne] << 4; byte key[] = new byte[8]; for (int i = 0; i < key.length; i++) { key[i] = (byte) 0; } for (int i = 0; i < key.length && i < original.length; i++) { int iChar = (int) original[i]; key[i] = (byte) (iChar << 1); } int schedule[] = des_set_key(key); int out[] = body(schedule, Eswap0, Eswap1); byte b[] = new byte[9]; intToFourBytes(out[0], b, 0); intToFourBytes(out[1], b, 4); b[8] = 0; for (int i = 2, y = 0, u = 0x80; i < 13; i++) { for (int j = 0, c = 0; j < 6; j++) { c <<= 1; if (((int) b[y] & u) != 0) { c |= 1; } u >>>= 1; if (u == 0) { y++; u = 0x80; } result[i] = cov_2byte[c]; } } return result; } }
public class class_name { public static final byte[] crypt(byte salt[], byte original[]) { byte result[] = new byte[13]; byte byteZero = salt[0]; byte byteOne = salt[1]; result[0] = byteZero; result[1] = byteOne; int Eswap0 = con_salt[byteZero]; int Eswap1 = con_salt[byteOne] << 4; byte key[] = new byte[8]; for (int i = 0; i < key.length; i++) { key[i] = (byte) 0; // depends on control dependency: [for], data = [i] } for (int i = 0; i < key.length && i < original.length; i++) { int iChar = (int) original[i]; key[i] = (byte) (iChar << 1); // depends on control dependency: [for], data = [i] } int schedule[] = des_set_key(key); int out[] = body(schedule, Eswap0, Eswap1); byte b[] = new byte[9]; intToFourBytes(out[0], b, 0); intToFourBytes(out[1], b, 4); b[8] = 0; for (int i = 2, y = 0, u = 0x80; i < 13; i++) { for (int j = 0, c = 0; j < 6; j++) { c <<= 1; // depends on control dependency: [for], data = [none] if (((int) b[y] & u) != 0) { c |= 1; // depends on control dependency: [if], data = [none] } u >>>= 1; // depends on control dependency: [for], data = [none] if (u == 0) { y++; // depends on control dependency: [if], data = [none] u = 0x80; // depends on control dependency: [if], data = [none] } result[i] = cov_2byte[c]; // depends on control dependency: [for], data = [none] } } return result; } }
public class class_name { @Nonnull public static <KEYTYPE, VALUETYPE> String getImplodedMapped (@Nonnull final String sSepOuter, @Nonnull final String sSepInner, @Nullable final Map <? extends KEYTYPE, ? extends VALUETYPE> aElements, @Nonnull final Function <? super KEYTYPE, String> aKeyMapper, @Nonnull final Function <? super VALUETYPE, String> aValueMapper) { ValueEnforcer.notNull (sSepOuter, "SepOuter"); ValueEnforcer.notNull (sSepInner, "SepInner"); final StringBuilder aSB = new StringBuilder (); if (aElements != null) { boolean bFirst = true; for (final Map.Entry <? extends KEYTYPE, ? extends VALUETYPE> aElement : aElements.entrySet ()) { if (bFirst) bFirst = false; else aSB.append (sSepOuter); aSB.append (aKeyMapper.apply (aElement.getKey ())) .append (sSepInner) .append (aValueMapper.apply (aElement.getValue ())); } } return aSB.toString (); } }
public class class_name { @Nonnull public static <KEYTYPE, VALUETYPE> String getImplodedMapped (@Nonnull final String sSepOuter, @Nonnull final String sSepInner, @Nullable final Map <? extends KEYTYPE, ? extends VALUETYPE> aElements, @Nonnull final Function <? super KEYTYPE, String> aKeyMapper, @Nonnull final Function <? super VALUETYPE, String> aValueMapper) { ValueEnforcer.notNull (sSepOuter, "SepOuter"); ValueEnforcer.notNull (sSepInner, "SepInner"); final StringBuilder aSB = new StringBuilder (); if (aElements != null) { boolean bFirst = true; for (final Map.Entry <? extends KEYTYPE, ? extends VALUETYPE> aElement : aElements.entrySet ()) { if (bFirst) bFirst = false; else aSB.append (sSepOuter); aSB.append (aKeyMapper.apply (aElement.getKey ())) .append (sSepInner) .append (aValueMapper.apply (aElement.getValue ())); // depends on control dependency: [for], data = [aElement] } } return aSB.toString (); } }
public class class_name { void lookupPrimaryKeys() throws StageException { Connection connection = null; try { connection = dataSource.getConnection(); primaryKeyColumns = jdbcUtil.getPrimaryKeys(connection, schema, tableName); } catch (SQLException e) { String formattedError = jdbcUtil.formatSqlException(e); LOG.error(formattedError, e); throw new StageException(JdbcErrors.JDBC_17, tableName, formattedError); } finally { if (connection != null) { try { connection.close(); } catch (SQLException e) { String formattedError = jdbcUtil.formatSqlException(e); LOG.error(formattedError, e); } } } } }
public class class_name { void lookupPrimaryKeys() throws StageException { Connection connection = null; try { connection = dataSource.getConnection(); primaryKeyColumns = jdbcUtil.getPrimaryKeys(connection, schema, tableName); } catch (SQLException e) { String formattedError = jdbcUtil.formatSqlException(e); LOG.error(formattedError, e); throw new StageException(JdbcErrors.JDBC_17, tableName, formattedError); } finally { if (connection != null) { try { connection.close(); // depends on control dependency: [try], data = [none] } catch (SQLException e) { String formattedError = jdbcUtil.formatSqlException(e); LOG.error(formattedError, e); } // depends on control dependency: [catch], data = [none] } } } }
public class class_name { @SuppressWarnings("unused") public Object readResolve() { if (configVersion < 1) { if (isNull(nodeProperties)) nodeProperties = new ArrayList<>(); nodeProperties.add(new DockerNodeProperty("DOCKER_CONTAINER_ID", "JENKINS_CLOUD_ID", "DOCKER_HOST")); configVersion = 1; } // real @Nonnull if (mode == null) { mode = Node.Mode.NORMAL; } if (retentionStrategy == null) { retentionStrategy = new DockerOnceRetentionStrategy(10); } try { labelSet = Label.parse(getLabelString()); // fails sometimes under debugger } catch (Throwable t) { LOG.error("Can't parse labels: {}", t); } return this; } }
public class class_name { @SuppressWarnings("unused") public Object readResolve() { if (configVersion < 1) { if (isNull(nodeProperties)) nodeProperties = new ArrayList<>(); nodeProperties.add(new DockerNodeProperty("DOCKER_CONTAINER_ID", "JENKINS_CLOUD_ID", "DOCKER_HOST")); // depends on control dependency: [if], data = [none] configVersion = 1; // depends on control dependency: [if], data = [none] } // real @Nonnull if (mode == null) { mode = Node.Mode.NORMAL; // depends on control dependency: [if], data = [none] } if (retentionStrategy == null) { retentionStrategy = new DockerOnceRetentionStrategy(10); // depends on control dependency: [if], data = [none] } try { labelSet = Label.parse(getLabelString()); // fails sometimes under debugger // depends on control dependency: [try], data = [none] } catch (Throwable t) { LOG.error("Can't parse labels: {}", t); } // depends on control dependency: [catch], data = [none] return this; } }
public class class_name { public static Long getContentLengthFromContentRange(ObjectMetadata metadata) { ValidationUtils.assertNotNull(metadata, "Object metadata"); String contentRange = (String) metadata.getRawMetadataValue(Headers.CONTENT_RANGE); if (contentRange != null) { try { String[] tokens = contentRange.split("[ -/]+"); return Long.parseLong(tokens[3]); } catch (Exception e) { log.info(String.format("Error parsing 'Content-Range' header value: %s. So returning " + "null value for content length", contentRange), e); } } return null; } }
public class class_name { public static Long getContentLengthFromContentRange(ObjectMetadata metadata) { ValidationUtils.assertNotNull(metadata, "Object metadata"); String contentRange = (String) metadata.getRawMetadataValue(Headers.CONTENT_RANGE); if (contentRange != null) { try { String[] tokens = contentRange.split("[ -/]+"); return Long.parseLong(tokens[3]); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.info(String.format("Error parsing 'Content-Range' header value: %s. So returning " + "null value for content length", contentRange), e); } // depends on control dependency: [catch], data = [none] } return null; } }
public class class_name { public Entry<T> dequeueMin() { /* Check for whether we're empty. */ if (isEmpty()) throw new NoSuchElementException("Heap is empty."); /* Otherwise, we're about to lose an element, so decrement the number of * entries in this heap. */ --mSize; /* Grab the minimum element so we know what to return. */ Entry<T> minElem = mMin; /* Now, we need to get rid of this element from the list of roots. There * are two cases to consider. First, if this is the only element in the * list of roots, we set the list of roots to be null by clearing mMin. * Otherwise, if it's not null, then we write the elements next to the * min element around the min element to remove it, then arbitrarily * reassign the min. */ if (mMin.mNext == mMin) { // Case one mMin = null; } else { // Case two mMin.mPrev.mNext = mMin.mNext; mMin.mNext.mPrev = mMin.mPrev; mMin = mMin.mNext; // Arbitrary element of the root list. } /* Next, clear the parent fields of all of the min element's children, * since they're about to become roots. Because the elements are * stored in a circular list, the traversal is a bit complex. */ if (minElem.mChild != null) { /* Keep track of the first visited node. */ Entry<?> curr = minElem.mChild; do { curr.mParent = null; /* Walk to the next node, then stop if this is the node we * started at. */ curr = curr.mNext; } while (curr != minElem.mChild); } /* Next, splice the children of the root node into the topmost list, * then set mMin to point somewhere in that list. */ mMin = mergeLists(mMin, minElem.mChild); /* If there are no entries left, we're done. */ if (mMin == null) return minElem; /* Next, we need to coalsce all of the roots so that there is only one * tree of each degree. To track trees of each size, we allocate an * ArrayList where the entry at position i is either null or the * unique tree of degree i. 
*/ List<Entry<T>> treeTable = new ArrayList<Entry<T>>(); /* We need to traverse the entire list, but since we're going to be * messing around with it we have to be careful not to break our * traversal order mid-stream. One major challenge is how to detect * whether we're visiting the same node twice. To do this, we'll * spent a bit of overhead adding all of the nodes to a list, and * then will visit each element of this list in order. */ List<Entry<T>> toVisit = new ArrayList<Entry<T>>(); /* To add everything, we'll iterate across the elements until we * find the first element twice. We check this by looping while the * list is empty or while the current element isn't the first element * of that list. */ for (Entry<T> curr = mMin; toVisit.isEmpty() || toVisit.get(0) != curr; curr = curr.mNext) toVisit.add(curr); /* Traverse this list and perform the appropriate unioning steps. */ for (Entry<T> curr : toVisit) { /* Keep merging until a match arises. */ while (true) { /* Ensure that the list is long enough to hold an element of this * degree. */ while (curr.mDegree >= treeTable.size()) treeTable.add(null); /* If nothing's here, we're can record that this tree has this size * and are done processing. */ if (treeTable.get(curr.mDegree) == null) { treeTable.set(curr.mDegree, curr); break; } /* Otherwise, merge with what's there. */ Entry<T> other = treeTable.get(curr.mDegree); treeTable.set(curr.mDegree, null); // Clear the slot /* Determine which of the two trees has the smaller root, storing * the two tree accordingly. */ Entry<T> min = (other.mPriority < curr.mPriority) ? other : curr; Entry<T> max = (other.mPriority < curr.mPriority) ? curr : other; /* Break max out of the root list, then merge it into min's child * list. */ max.mNext.mPrev = max.mPrev; max.mPrev.mNext = max.mNext; /* Make it a singleton so that we can merge it. */ max.mNext = max.mPrev = max; min.mChild = mergeLists(min.mChild, max); /* Reparent max appropriately. 
*/ max.mParent = min; /* Clear max's mark, since it can now lose another child. */ max.mIsMarked = false; /* Increase min's degree; it now has another child. */ ++min.mDegree; /* Continue merging this tree. */ curr = min; } /* Update the global min based on this node. Note that we compare * for <= instead of < here. That's because if we just did a * reparent operation that merged two different trees of equal * priority, we need to make sure that the min pointer points to * the root-level one. */ if (curr.mPriority <= mMin.mPriority) mMin = curr; } return minElem; } }
public class class_name { public Entry<T> dequeueMin() { /* Check for whether we're empty. */ if (isEmpty()) throw new NoSuchElementException("Heap is empty."); /* Otherwise, we're about to lose an element, so decrement the number of * entries in this heap. */ --mSize; /* Grab the minimum element so we know what to return. */ Entry<T> minElem = mMin; /* Now, we need to get rid of this element from the list of roots. There * are two cases to consider. First, if this is the only element in the * list of roots, we set the list of roots to be null by clearing mMin. * Otherwise, if it's not null, then we write the elements next to the * min element around the min element to remove it, then arbitrarily * reassign the min. */ if (mMin.mNext == mMin) { // Case one mMin = null; // depends on control dependency: [if], data = [none] } else { // Case two mMin.mPrev.mNext = mMin.mNext; // depends on control dependency: [if], data = [none] mMin.mNext.mPrev = mMin.mPrev; // depends on control dependency: [if], data = [none] mMin = mMin.mNext; // Arbitrary element of the root list. // depends on control dependency: [if], data = [none] } /* Next, clear the parent fields of all of the min element's children, * since they're about to become roots. Because the elements are * stored in a circular list, the traversal is a bit complex. */ if (minElem.mChild != null) { /* Keep track of the first visited node. */ Entry<?> curr = minElem.mChild; do { curr.mParent = null; /* Walk to the next node, then stop if this is the node we * started at. */ curr = curr.mNext; } while (curr != minElem.mChild); } /* Next, splice the children of the root node into the topmost list, * then set mMin to point somewhere in that list. */ mMin = mergeLists(mMin, minElem.mChild); /* If there are no entries left, we're done. */ if (mMin == null) return minElem; /* Next, we need to coalsce all of the roots so that there is only one * tree of each degree. 
To track trees of each size, we allocate an * ArrayList where the entry at position i is either null or the * unique tree of degree i. */ List<Entry<T>> treeTable = new ArrayList<Entry<T>>(); /* We need to traverse the entire list, but since we're going to be * messing around with it we have to be careful not to break our * traversal order mid-stream. One major challenge is how to detect * whether we're visiting the same node twice. To do this, we'll * spent a bit of overhead adding all of the nodes to a list, and * then will visit each element of this list in order. */ List<Entry<T>> toVisit = new ArrayList<Entry<T>>(); /* To add everything, we'll iterate across the elements until we * find the first element twice. We check this by looping while the * list is empty or while the current element isn't the first element * of that list. */ for (Entry<T> curr = mMin; toVisit.isEmpty() || toVisit.get(0) != curr; curr = curr.mNext) toVisit.add(curr); /* Traverse this list and perform the appropriate unioning steps. */ for (Entry<T> curr : toVisit) { /* Keep merging until a match arises. */ while (true) { /* Ensure that the list is long enough to hold an element of this * degree. */ while (curr.mDegree >= treeTable.size()) treeTable.add(null); /* If nothing's here, we're can record that this tree has this size * and are done processing. */ if (treeTable.get(curr.mDegree) == null) { treeTable.set(curr.mDegree, curr); // depends on control dependency: [if], data = [none] break; } /* Otherwise, merge with what's there. */ Entry<T> other = treeTable.get(curr.mDegree); treeTable.set(curr.mDegree, null); // Clear the slot // depends on control dependency: [while], data = [none] /* Determine which of the two trees has the smaller root, storing * the two tree accordingly. */ Entry<T> min = (other.mPriority < curr.mPriority) ? other : curr; Entry<T> max = (other.mPriority < curr.mPriority) ? curr : other; /* Break max out of the root list, then merge it into min's child * list. 
*/ max.mNext.mPrev = max.mPrev; // depends on control dependency: [while], data = [none] max.mPrev.mNext = max.mNext; // depends on control dependency: [while], data = [none] /* Make it a singleton so that we can merge it. */ max.mNext = max.mPrev = max; // depends on control dependency: [while], data = [none] min.mChild = mergeLists(min.mChild, max); // depends on control dependency: [while], data = [none] /* Reparent max appropriately. */ max.mParent = min; // depends on control dependency: [while], data = [none] /* Clear max's mark, since it can now lose another child. */ max.mIsMarked = false; // depends on control dependency: [while], data = [none] /* Increase min's degree; it now has another child. */ ++min.mDegree; // depends on control dependency: [while], data = [none] /* Continue merging this tree. */ curr = min; // depends on control dependency: [while], data = [none] } /* Update the global min based on this node. Note that we compare * for <= instead of < here. That's because if we just did a * reparent operation that merged two different trees of equal * priority, we need to make sure that the min pointer points to * the root-level one. */ if (curr.mPriority <= mMin.mPriority) mMin = curr; } return minElem; } }
public class class_name { private void vibrateIfEnabled() { final boolean enabled = styledAttributes.getBoolean(R.styleable.PinLock_vibrateOnClick, false); if(enabled){ Vibrator v = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE); final int duration = styledAttributes.getInt(R.styleable.PinLock_vibrateDuration, 20); v.vibrate(duration); } } }
public class class_name { private void vibrateIfEnabled() { final boolean enabled = styledAttributes.getBoolean(R.styleable.PinLock_vibrateOnClick, false); if(enabled){ Vibrator v = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE); final int duration = styledAttributes.getInt(R.styleable.PinLock_vibrateDuration, 20); v.vibrate(duration); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static byte[] join(byte[] separator, byte[]... components) { if (components == null || components.length == 0) { return Constants.EMPTY_BYTES; } int finalSize = 0; if (separator != null) { finalSize = separator.length * (components.length - 1); } for (byte[] comp : components) { finalSize += comp.length; } byte[] buf = new byte[finalSize]; int offset = 0; for (int i=0; i < components.length; i++) { System.arraycopy(components[i], 0, buf, offset, components[i].length); offset += components[i].length; if (i < (components.length-1) && separator != null && separator.length > 0) { System.arraycopy(separator, 0, buf, offset, separator.length); offset += separator.length; } } return buf; } }
public class class_name { public static byte[] join(byte[] separator, byte[]... components) { if (components == null || components.length == 0) { return Constants.EMPTY_BYTES; // depends on control dependency: [if], data = [none] } int finalSize = 0; if (separator != null) { finalSize = separator.length * (components.length - 1); // depends on control dependency: [if], data = [none] } for (byte[] comp : components) { finalSize += comp.length; // depends on control dependency: [for], data = [comp] } byte[] buf = new byte[finalSize]; int offset = 0; for (int i=0; i < components.length; i++) { System.arraycopy(components[i], 0, buf, offset, components[i].length); // depends on control dependency: [for], data = [i] offset += components[i].length; // depends on control dependency: [for], data = [i] if (i < (components.length-1) && separator != null && separator.length > 0) { System.arraycopy(separator, 0, buf, offset, separator.length); // depends on control dependency: [if], data = [none] offset += separator.length; // depends on control dependency: [if], data = [none] } } return buf; } }
public class class_name { public static String getOffsetCodeFromSchedule(Schedule schedule) { double doubleLength = 0; for(int i = 0; i < schedule.getNumberOfPeriods(); i ++) { doubleLength += schedule.getPeriodLength(i); } doubleLength /= schedule.getNumberOfPeriods(); doubleLength *= 12; int periodLength = (int) Math.round(doubleLength); String offsetCode = periodLength + "M"; return offsetCode; } }
public class class_name { public static String getOffsetCodeFromSchedule(Schedule schedule) { double doubleLength = 0; for(int i = 0; i < schedule.getNumberOfPeriods(); i ++) { doubleLength += schedule.getPeriodLength(i); // depends on control dependency: [for], data = [i] } doubleLength /= schedule.getNumberOfPeriods(); doubleLength *= 12; int periodLength = (int) Math.round(doubleLength); String offsetCode = periodLength + "M"; return offsetCode; } }
public class class_name { private void drawForeground(final ZonedDateTime TIME) { foregroundCtx.clearRect(0, 0, size, size); Locale locale = clock.getLocale(); // draw the time if (clock.isTextVisible()) { foregroundCtx.setFill(textColor); foregroundCtx.setTextBaseline(VPos.CENTER); foregroundCtx.setTextAlign(TextAlignment.CENTER); if (Locale.US == locale) { foregroundCtx.setFont(Fonts.digital(0.17 * size)); foregroundCtx.fillText(AMPM_TIME_FORMATTER.format(TIME), center, center); } else { foregroundCtx.setFont(Fonts.digital(0.2 * size)); foregroundCtx.fillText(TIME_FORMATTER.format(TIME), center, center); } } // draw the date if (clock.isDateVisible()) { foregroundCtx.setFill(dateColor); foregroundCtx.setFont(Fonts.digital(0.09 * size)); foregroundCtx.fillText(dateFormat.format(TIME), center, size * 0.65); } // draw the alarmOn icon if (clock.isAlarmsEnabled() && clock.getAlarms().size() > 0) { foregroundCtx.setFill(alarmColor); drawAlarmIcon(foregroundCtx, foregroundCtx.getFill()); } } }
public class class_name { private void drawForeground(final ZonedDateTime TIME) { foregroundCtx.clearRect(0, 0, size, size); Locale locale = clock.getLocale(); // draw the time if (clock.isTextVisible()) { foregroundCtx.setFill(textColor); // depends on control dependency: [if], data = [none] foregroundCtx.setTextBaseline(VPos.CENTER); // depends on control dependency: [if], data = [none] foregroundCtx.setTextAlign(TextAlignment.CENTER); // depends on control dependency: [if], data = [none] if (Locale.US == locale) { foregroundCtx.setFont(Fonts.digital(0.17 * size)); // depends on control dependency: [if], data = [none] foregroundCtx.fillText(AMPM_TIME_FORMATTER.format(TIME), center, center); // depends on control dependency: [if], data = [none] } else { foregroundCtx.setFont(Fonts.digital(0.2 * size)); // depends on control dependency: [if], data = [none] foregroundCtx.fillText(TIME_FORMATTER.format(TIME), center, center); // depends on control dependency: [if], data = [none] } } // draw the date if (clock.isDateVisible()) { foregroundCtx.setFill(dateColor); // depends on control dependency: [if], data = [none] foregroundCtx.setFont(Fonts.digital(0.09 * size)); // depends on control dependency: [if], data = [none] foregroundCtx.fillText(dateFormat.format(TIME), center, size * 0.65); // depends on control dependency: [if], data = [none] } // draw the alarmOn icon if (clock.isAlarmsEnabled() && clock.getAlarms().size() > 0) { foregroundCtx.setFill(alarmColor); // depends on control dependency: [if], data = [none] drawAlarmIcon(foregroundCtx, foregroundCtx.getFill()); // depends on control dependency: [if], data = [none] } } }
public class class_name { public byte[] decryptBytes(String encryptedCredentials) { checkNotNull(encryptedCredentials, "encryptedCredentials"); try { byte[] encryptedBytes = BaseEncoding.base64().omitPadding().decode(encryptedCredentials); Cipher cipher = Cipher.getInstance(CIPHER); cipher.init(Cipher.DECRYPT_MODE, _key, _iv); ByteBuffer plaintextBytes = ByteBuffer.wrap(cipher.doFinal(encryptedBytes)); int length = plaintextBytes.getInt(); byte[] apiKey = new byte[length]; plaintextBytes.get(apiKey); return apiKey; } catch (Throwable t) { // This shouldn't happen since AES is supported by all JVMs. throw Throwables.propagate(t); } } }
public class class_name { public byte[] decryptBytes(String encryptedCredentials) { checkNotNull(encryptedCredentials, "encryptedCredentials"); try { byte[] encryptedBytes = BaseEncoding.base64().omitPadding().decode(encryptedCredentials); Cipher cipher = Cipher.getInstance(CIPHER); cipher.init(Cipher.DECRYPT_MODE, _key, _iv); // depends on control dependency: [try], data = [none] ByteBuffer plaintextBytes = ByteBuffer.wrap(cipher.doFinal(encryptedBytes)); int length = plaintextBytes.getInt(); byte[] apiKey = new byte[length]; plaintextBytes.get(apiKey); // depends on control dependency: [try], data = [none] return apiKey; // depends on control dependency: [try], data = [none] } catch (Throwable t) { // This shouldn't happen since AES is supported by all JVMs. throw Throwables.propagate(t); } // depends on control dependency: [catch], data = [none] } }
public class class_name { static String capitalizeFirstCharOfString(final String input) { if (input == null || input.length() == 0) { return ""; } else if (input.length() == 1) { return input.toUpperCase(); } else { return input.substring(0, 1).toUpperCase() + input.substring(1); } } }
public class class_name { static String capitalizeFirstCharOfString(final String input) { if (input == null || input.length() == 0) { return ""; // depends on control dependency: [if], data = [none] } else if (input.length() == 1) { return input.toUpperCase(); // depends on control dependency: [if], data = [none] } else { return input.substring(0, 1).toUpperCase() + input.substring(1); // depends on control dependency: [if], data = [1)] } } }
public class class_name { public TouchActions scroll(WebElement onElement, int xOffset, int yOffset) { if (touchScreen != null) { action.addAction(new ScrollAction(touchScreen, (Locatable) onElement, xOffset, yOffset)); } return this; } }
public class class_name { public TouchActions scroll(WebElement onElement, int xOffset, int yOffset) { if (touchScreen != null) { action.addAction(new ScrollAction(touchScreen, (Locatable) onElement, xOffset, yOffset)); // depends on control dependency: [if], data = [(touchScreen] } return this; } }
public class class_name { @Bean public AuthorProvider authorProvider() { return () -> { final SecurityContext context = SecurityContextHolder.getContext(); if (context != null && context.getAuthentication() != null) { return context.getAuthentication().getPrincipal().toString(); } else { return "system"; } }; } }
public class class_name { @Bean public AuthorProvider authorProvider() { return () -> { final SecurityContext context = SecurityContextHolder.getContext(); if (context != null && context.getAuthentication() != null) { return context.getAuthentication().getPrincipal().toString(); // depends on control dependency: [if], data = [none] } else { return "system"; // depends on control dependency: [if], data = [none] } }; } }
public class class_name { public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { ActionErrors errors = null; // has the maximum length been exceeded? Boolean maxLengthExceeded = (Boolean) request.getAttribute(MultipartRequestHandler.ATTRIBUTE_MAX_LENGTH_EXCEEDED); if ((maxLengthExceeded != null) && (maxLengthExceeded.booleanValue())) { errors = new ActionErrors(); errors.add(ERROR_PROPERTY_MAX_LENGTH_EXCEEDED, new ActionMessage("maxLengthExceeded")); } else if (fileMap.size() > MAX_IMAGES_COUNT) { errors = new ActionErrors(); errors.add(ERROR_PROPERTY_MAX_LENGTH_EXCEEDED, new ActionMessage("maxLengthExceeded")); } else { // retrieve the file name Iterator iter = fileMap.values().iterator(); while (iter.hasNext()) { FormFile file = (FormFile) iter.next(); String fileName = file.getFileName(); if ((!fileName.toLowerCase().endsWith(".gif")) && !(fileName.toLowerCase().endsWith(".jpg")) && !(fileName.toLowerCase().endsWith(".png"))) { errors = new ActionErrors(); errors.add("notImage", new ActionMessage("notImage")); } } } return errors; } }
public class class_name { public ActionErrors validate(ActionMapping mapping, HttpServletRequest request) { ActionErrors errors = null; // has the maximum length been exceeded? Boolean maxLengthExceeded = (Boolean) request.getAttribute(MultipartRequestHandler.ATTRIBUTE_MAX_LENGTH_EXCEEDED); if ((maxLengthExceeded != null) && (maxLengthExceeded.booleanValue())) { errors = new ActionErrors(); // depends on control dependency: [if], data = [none] errors.add(ERROR_PROPERTY_MAX_LENGTH_EXCEEDED, new ActionMessage("maxLengthExceeded")); // depends on control dependency: [if], data = [none] } else if (fileMap.size() > MAX_IMAGES_COUNT) { errors = new ActionErrors(); // depends on control dependency: [if], data = [none] errors.add(ERROR_PROPERTY_MAX_LENGTH_EXCEEDED, new ActionMessage("maxLengthExceeded")); // depends on control dependency: [if], data = [none] } else { // retrieve the file name Iterator iter = fileMap.values().iterator(); while (iter.hasNext()) { FormFile file = (FormFile) iter.next(); String fileName = file.getFileName(); if ((!fileName.toLowerCase().endsWith(".gif")) && !(fileName.toLowerCase().endsWith(".jpg")) && !(fileName.toLowerCase().endsWith(".png"))) { errors = new ActionErrors(); // depends on control dependency: [if], data = [none] errors.add("notImage", new ActionMessage("notImage")); // depends on control dependency: [if], data = [none] } } } return errors; } }
public class class_name { public String toRuleString() { if (0 == this.ruleNumber) { return this.grammar.r0String; } return this.first.toString() + SPACE + this.second.toString() + SPACE; } }
public class class_name { public String toRuleString() { if (0 == this.ruleNumber) { return this.grammar.r0String; // depends on control dependency: [if], data = [none] } return this.first.toString() + SPACE + this.second.toString() + SPACE; } }
public class class_name { public boolean set(long index) { if (!(index < size)) throw new IndexOutOfBoundsException(); if (!get(index)) { bytes.writeLong(offset(index), bytes.readLong(offset(index)) | (1l << position(index))); count++; return true; } return false; } }
public class class_name { public boolean set(long index) { if (!(index < size)) throw new IndexOutOfBoundsException(); if (!get(index)) { bytes.writeLong(offset(index), bytes.readLong(offset(index)) | (1l << position(index))); // depends on control dependency: [if], data = [none] count++; // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public static Fiat parseFiatInexact(final String currencyCode, final String str) { try { long val = new BigDecimal(str).movePointRight(SMALLEST_UNIT_EXPONENT).longValue(); return Fiat.valueOf(currencyCode, val); } catch (ArithmeticException e) { throw new IllegalArgumentException(e); } } }
public class class_name { public static Fiat parseFiatInexact(final String currencyCode, final String str) { try { long val = new BigDecimal(str).movePointRight(SMALLEST_UNIT_EXPONENT).longValue(); return Fiat.valueOf(currencyCode, val); // depends on control dependency: [try], data = [none] } catch (ArithmeticException e) { throw new IllegalArgumentException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static boolean qualifiedNameEquals(String s1, String s2) { if (isNullOrEmpty(s1)) { return isNullOrEmpty(s2); } if (!s1.equals(s2)) { final String simple1 = simpleName(s1); final String simple2 = simpleName(s2); return simpleNameEquals(simple1, simple2); } return true; } }
public class class_name { public static boolean qualifiedNameEquals(String s1, String s2) { if (isNullOrEmpty(s1)) { return isNullOrEmpty(s2); // depends on control dependency: [if], data = [none] } if (!s1.equals(s2)) { final String simple1 = simpleName(s1); final String simple2 = simpleName(s2); return simpleNameEquals(simple1, simple2); // depends on control dependency: [if], data = [none] } return true; } }
public class class_name { public static void loopBlocks(int start , int endExclusive , IntRangeConsumer consumer ) { final ForkJoinPool pool = BoofConcurrency.pool; int numThreads = pool.getParallelism(); int range = endExclusive-start; if( range == 0 ) // nothing to do here! return; if( range < 0 ) throw new IllegalArgumentException("end must be more than start. "+start+" -> "+endExclusive); // Did some experimentation here. Gave it more threads than were needed or exactly what was needed // exactly seemed to do better in the test cases int blockSize = Math.max(1,range/numThreads); try { pool.submit(new IntRangeTask(start,endExclusive,blockSize,consumer)).get(); } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } } }
public class class_name { public static void loopBlocks(int start , int endExclusive , IntRangeConsumer consumer ) { final ForkJoinPool pool = BoofConcurrency.pool; int numThreads = pool.getParallelism(); int range = endExclusive-start; if( range == 0 ) // nothing to do here! return; if( range < 0 ) throw new IllegalArgumentException("end must be more than start. "+start+" -> "+endExclusive); // Did some experimentation here. Gave it more threads than were needed or exactly what was needed // exactly seemed to do better in the test cases int blockSize = Math.max(1,range/numThreads); try { pool.submit(new IntRangeTask(start,endExclusive,blockSize,consumer)).get(); // depends on control dependency: [try], data = [none] } catch (InterruptedException | ExecutionException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static <C extends Closeable, T extends Iterable<C>> Closeable forIterable2( @Nonnull final Logger log, @Nonnull final Iterable<T> closeables ) { return new Closeable() { public void close() throws IOException { closeAll(log, Iterables.transform(closeables, new Function<T, Closeable>() { public Closeable apply(final @Nullable T input) { if (input == null) { return new Closeable() { public void close() throws IOException {} }; } else { return forIterable(log, input); } } })); } }; } }
public class class_name { public static <C extends Closeable, T extends Iterable<C>> Closeable forIterable2( @Nonnull final Logger log, @Nonnull final Iterable<T> closeables ) { return new Closeable() { public void close() throws IOException { closeAll(log, Iterables.transform(closeables, new Function<T, Closeable>() { public Closeable apply(final @Nullable T input) { if (input == null) { return new Closeable() { public void close() throws IOException {} }; // depends on control dependency: [if], data = [none] } else { return forIterable(log, input); // depends on control dependency: [if], data = [none] } } })); } }; } }
public class class_name { private void readConfigurationFile(final String configFile) { InputStream is; try { is = new FileInputStream(configFile); processConfiguration(MtasConfiguration.readConfiguration(is)); is.close(); } catch (FileNotFoundException e) { log.error("Couldn't find " + configFile, e); } catch (IOException e) { log.error("Couldn't read " + configFile, e); } } }
public class class_name {
    /**
     * Loads and applies an MTAS configuration from the given file path; failures
     * are logged, not propagated. NOTE(review): the stream is only closed on the
     * success path — it leaks if readConfiguration throws. Pre-existing
     * control/data-dependency annotations are retained.
     *
     * @param configFile path of the configuration file to read
     */
    private void readConfigurationFile(final String configFile) {
        InputStream is;
        try {
            is = new FileInputStream(configFile); // depends on control dependency: [try], data = [none]
            processConfiguration(MtasConfiguration.readConfiguration(is)); // depends on control dependency: [try], data = [none]
            is.close(); // depends on control dependency: [try], data = [none]
        } catch (FileNotFoundException e) {
            log.error("Couldn't find " + configFile, e);
        } catch (IOException e) {
            log.error("Couldn't read " + configFile, e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name { public static void mapZipFile(final byte[] zipFile, final Map<String, byte[]> output, final String keyPrefix, final String keyUnPrefix) { try { final String fixedKeyPrefix = keyPrefix == null ? "" : keyPrefix; final String fixedKeyUnPrefix = keyUnPrefix == null ? "" : keyUnPrefix; final ZipInputStream zf = new ZipInputStream(new ByteArrayInputStream(zipFile)); ZipEntry ze = null; while ((ze = zf.getNextEntry()) != null) { final String name = ze.getName(); final long size = ze.getSize(); // see if we are working with a file or a directory if (size != 0) { byte[] fileContents = new byte[0]; final byte[] fileBuffer = new byte[BUFFER_SIZE]; int bytesRead = 0; while ((bytesRead = zf.read(fileBuffer, 0, BUFFER_SIZE)) != -1) fileContents = ArrayUtils.addAll(fileContents, bytesRead == BUFFER_SIZE ? fileBuffer : ArrayUtils.subarray(fileBuffer, 0, bytesRead)); output.put(fixedKeyPrefix + name.replace(fixedKeyUnPrefix, ""), fileContents); } } } catch (final IOException ex) { LOG.error("Unable to read file contents", ex); } } }
public class class_name {
    /**
     * Expands an in-memory ZIP archive into {@code output}, keyed by entry name
     * with {@code keyUnPrefix} stripped and {@code keyPrefix} prepended.
     * NOTE(review): the ZipInputStream is never closed, the "size != 0" test
     * drops empty files, and repeated ArrayUtils.addAll is O(n^2) — candidates
     * for a fix. Pre-existing control/data-dependency annotations are retained.
     */
    public static void mapZipFile(final byte[] zipFile, final Map<String, byte[]> output,
            final String keyPrefix, final String keyUnPrefix) {
        try {
            final String fixedKeyPrefix = keyPrefix == null ? "" : keyPrefix;
            final String fixedKeyUnPrefix = keyUnPrefix == null ? "" : keyUnPrefix;
            final ZipInputStream zf = new ZipInputStream(new ByteArrayInputStream(zipFile));
            ZipEntry ze = null;
            while ((ze = zf.getNextEntry()) != null) {
                final String name = ze.getName();
                final long size = ze.getSize();
                // see if we are working with a file or a directory
                if (size != 0) {
                    byte[] fileContents = new byte[0];
                    final byte[] fileBuffer = new byte[BUFFER_SIZE];
                    int bytesRead = 0;
                    while ((bytesRead = zf.read(fileBuffer, 0, BUFFER_SIZE)) != -1)
                        fileContents = ArrayUtils.addAll(fileContents,
                                bytesRead == BUFFER_SIZE ? fileBuffer : ArrayUtils.subarray(fileBuffer, 0, bytesRead));
                    output.put(fixedKeyPrefix + name.replace(fixedKeyUnPrefix, ""), fileContents); // depends on control dependency: [if], data = [none]
                }
            }
        } catch (final IOException ex) {
            LOG.error("Unable to read file contents", ex);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name {
    /**
     * Breadth-first search from vertex v, labeling every reachable vertex with
     * the connected-component id in {@code cc} (unvisited vertices hold -1).
     */
    private void bfs(int v, int[] cc, int id) {
        Queue<Integer> frontier = new LinkedList<>();
        frontier.offer(v);
        cc[v] = id;
        while (!frontier.isEmpty()) {
            final int current = frontier.poll();
            for (Edge edge : graph[current]) {
                // In an undirected graph an edge may be stored as (other, current);
                // pick the endpoint that is not the current vertex.
                final int neighbor = (!digraph && edge.v2 == current) ? edge.v1 : edge.v2;
                if (cc[neighbor] == -1) {
                    cc[neighbor] = id;
                    frontier.offer(neighbor);
                }
            }
        }
    }
}
public class class_name {
    /**
     * Breadth-first search from vertex v, labeling every reachable vertex with
     * the connected-component id in {@code cc} (unvisited vertices hold -1).
     * Pre-existing control/data-dependency annotations are retained.
     */
    private void bfs(int v, int[] cc, int id) {
        cc[v] = id;
        Queue<Integer> queue = new LinkedList<>();
        queue.offer(v);
        while (!queue.isEmpty()) {
            int t = queue.poll();
            for (Edge edge : graph[t]) {
                int i = edge.v2;
                // Undirected edges may be stored as (other, t); use the far endpoint.
                if (!digraph && i == t) {
                    i = edge.v1; // depends on control dependency: [if], data = [none]
                }
                if (cc[i] == -1) {
                    queue.offer(i); // depends on control dependency: [if], data = [none]
                    cc[i] = id; // depends on control dependency: [if], data = [none]
                }
            }
        }
    }
}
public class class_name {
    /**
     * Compacts the parallel key/value arrays in place, keeping only entries
     * whose key is registered in {@code segmentKeys}. No-op when segment
     * registration is absent or the list is empty.
     */
    @Override
    public void reduceToSegmentKeys() {
        if (segmentRegistration == null || size <= 0) {
            return;
        }
        final int originalSize = size;
        final String[] keysSnapshot = keyList.clone();
        final T1[][] valuesSnapshot = fullValueList.clone();
        // Rewrite surviving entries to the front of the arrays.
        size = 0;
        for (int i = 0; i < originalSize; i++) {
            if (segmentKeys.contains(keysSnapshot[i])) {
                keyList[size] = keysSnapshot[i];
                fullValueList[size] = valuesSnapshot[i];
                size++;
            }
        }
    }
}
public class class_name {
    /**
     * Compacts the parallel key/value arrays in place, keeping only entries
     * whose key is registered in {@code segmentKeys}. Pre-existing
     * control/data-dependency annotations are retained.
     */
    @Override
    public void reduceToSegmentKeys() {
        if (segmentRegistration != null && size > 0) {
            int sizeCopy = size;
            String[] keyListCopy = keyList.clone();
            T1[][] fullValueListCopy = fullValueList.clone();
            // Surviving entries are rewritten to the front of the arrays.
            size = 0; // depends on control dependency: [if], data = [none]
            for (int i = 0; i < sizeCopy; i++) {
                if (segmentKeys.contains(keyListCopy[i])) {
                    keyList[size] = keyListCopy[i]; // depends on control dependency: [if], data = [none]
                    fullValueList[size] = fullValueListCopy[i]; // depends on control dependency: [if], data = [none]
                    size++; // depends on control dependency: [if], data = [none]
                }
            }
        }
    }
}
public class class_name {
    /**
     * Maps raw rich-text mark descriptors (maps with a "type" entry) to typed
     * CMARichMark instances. Unknown types are preserved as custom marks.
     */
    static List<CMARichMark> resolveMarks(List<Map<String, Object>> rawMarks) {
        final List<CMARichMark> resolved = new ArrayList<>(rawMarks.size());
        for (final Map<String, Object> rawMark : rawMarks) {
            final String type = (String) rawMark.get("type");
            final CMARichMark mark;
            if ("bold".equals(type)) {
                mark = new CMARichMark.CMARichMarkBold();
            } else if ("italic".equals(type)) {
                mark = new CMARichMark.CMARichMarkItalic();
            } else if ("underline".equals(type)) {
                mark = new CMARichMark.CMARichMarkUnderline();
            } else if ("code".equals(type)) {
                mark = new CMARichMark.CMARichMarkCode();
            } else {
                // Fallback keeps the original (possibly null) type string.
                mark = new CMARichMark.CMARichMarkCustom(type);
            }
            resolved.add(mark);
        }
        return resolved;
    }
}
public class class_name {
    /**
     * Maps raw rich-text mark descriptors (maps with a "type" entry) to typed
     * CMARichMark instances; unknown types become custom marks. Pre-existing
     * control/data-dependency annotations are retained.
     */
    static List<CMARichMark> resolveMarks(List<Map<String, Object>> rawMarks) {
        final List<CMARichMark> marks = new ArrayList<>(rawMarks.size());
        for (final Map<String, Object> rawMark : rawMarks) {
            final String type = (String) rawMark.get("type");
            if ("bold".equals(type)) {
                marks.add(new CMARichMark.CMARichMarkBold()); // depends on control dependency: [if], data = [none]
            } else if ("italic".equals(type)) {
                marks.add(new CMARichMark.CMARichMarkItalic()); // depends on control dependency: [if], data = [none]
            } else if ("underline".equals(type)) {
                marks.add(new CMARichMark.CMARichMarkUnderline()); // depends on control dependency: [if], data = [none]
            } else if ("code".equals(type)) {
                marks.add(new CMARichMark.CMARichMarkCode()); // depends on control dependency: [if], data = [none]
            } else {
                marks.add(new CMARichMark.CMARichMarkCustom(type)); // depends on control dependency: [if], data = [none]
            }
        }
        return marks;
    }
}
public class class_name {
    /**
     * Cache-start hook: for non-internal caches, registers a dependency on the
     * protobuf metadata cache and wires up a RemoteQueryManager component.
     */
    @Override
    public void cacheStarting(ComponentRegistry cr, Configuration cfg, String cacheName) {
        BasicComponentRegistry gcr = cr.getGlobalComponentRegistry().getComponent(BasicComponentRegistry.class);
        InternalCacheRegistry icr = gcr.getComponent(InternalCacheRegistry.class).running();
        if (icr.isInternalCache(cacheName)) {
            return; // internal caches are not remotely queryable
        }
        ProtobufMetadataManagerImpl protobufMetadataManager =
                (ProtobufMetadataManagerImpl) gcr.getComponent(ProtobufMetadataManager.class).running();
        protobufMetadataManager.addCacheDependency(cacheName);
        SerializationContext serCtx = protobufMetadataManager.getSerializationContext();
        RemoteQueryManager remoteQueryManager = buildQueryManager(cfg, serCtx, cr);
        cr.registerComponent(remoteQueryManager, RemoteQueryManager.class);
    }
}
public class class_name {
    /**
     * Cache-start hook: for non-internal caches, registers a dependency on the
     * protobuf metadata cache and wires up a RemoteQueryManager component.
     * Pre-existing control/data-dependency annotations are retained.
     */
    @Override
    public void cacheStarting(ComponentRegistry cr, Configuration cfg, String cacheName) {
        BasicComponentRegistry gcr = cr.getGlobalComponentRegistry().getComponent(BasicComponentRegistry.class);
        InternalCacheRegistry icr = gcr.getComponent(InternalCacheRegistry.class).running();
        if (!icr.isInternalCache(cacheName)) {
            ProtobufMetadataManagerImpl protobufMetadataManager =
                    (ProtobufMetadataManagerImpl) gcr.getComponent(ProtobufMetadataManager.class).running();
            protobufMetadataManager.addCacheDependency(cacheName); // depends on control dependency: [if], data = [none]
            SerializationContext serCtx = protobufMetadataManager.getSerializationContext();
            RemoteQueryManager remoteQueryManager = buildQueryManager(cfg, serCtx, cr);
            cr.registerComponent(remoteQueryManager, RemoteQueryManager.class); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name { @Override public ESat isEntailed() { boolean done = true; for (int i = 0; i < n; i++) { if ((!degrees[i].contains(0)) && !g.getPotentialNodes().contains(i)) { return ESat.FALSE; } ISet env = target.getPotSet(g, i); ISet ker = target.getMandSet(g, i); if (degrees[i].getLB() > env.size() || degrees[i].getUB() < ker.size()) { return ESat.FALSE; } if (env.size() != ker.size() || !degrees[i].isInstantiated()) { done = false; } } if (!done) { return ESat.UNDEFINED; } return ESat.TRUE; } }
public class class_name {
    /**
     * Entailment check for the degree constraint: FALSE on any violated bound or
     * missing required vertex; TRUE only when everything is decided; otherwise
     * UNDEFINED. Pre-existing control/data-dependency annotations are retained.
     */
    @Override
    public ESat isEntailed() {
        boolean done = true;
        for (int i = 0; i < n; i++) {
            if ((!degrees[i].contains(0)) && !g.getPotentialNodes().contains(i)) {
                return ESat.FALSE; // depends on control dependency: [if], data = [none]
            }
            ISet env = target.getPotSet(g, i);
            ISet ker = target.getMandSet(g, i);
            if (degrees[i].getLB() > env.size() || degrees[i].getUB() < ker.size()) {
                return ESat.FALSE; // depends on control dependency: [if], data = [none]
            }
            if (env.size() != ker.size() || !degrees[i].isInstantiated()) {
                done = false; // depends on control dependency: [if], data = [none]
            }
        }
        if (!done) {
            return ESat.UNDEFINED; // depends on control dependency: [if], data = [none]
        }
        return ESat.TRUE;
    }
}
public class class_name {
    /**
     * Restores the UI state captured when the glass pane was shown: detaches the
     * blocking mouse listener, restores the saved cursor, hides the pane, and
     * gives focus back to the previous owner (if one was recorded).
     */
    public static void hideGlassPane(final GlassPaneState state) {
        Utils4J.checkNotNull("state", state);
        final Component pane = state.getGlassPane();
        pane.removeMouseListener(state.getMouseListener());
        pane.setCursor(state.getCursor());
        pane.setVisible(false);
        // Hand focus back only when a previous owner was captured.
        if (state.getFocusOwner() != null) {
            state.getFocusOwner().requestFocus();
        }
    }
}
public class class_name {
    /**
     * Restores the UI state captured when the glass pane was shown: detaches the
     * mouse listener, restores the cursor, hides the pane and returns focus to
     * the previous owner. Pre-existing control/data-dependency annotations are
     * retained.
     */
    public static void hideGlassPane(final GlassPaneState state) {
        Utils4J.checkNotNull("state", state);
        final Component glassPane = state.getGlassPane();
        glassPane.removeMouseListener(state.getMouseListener());
        glassPane.setCursor(state.getCursor());
        glassPane.setVisible(false);
        if (state.getFocusOwner() != null) {
            state.getFocusOwner().requestFocus(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name { private BeanDiscoveryMode getBeanDiscoveryMode() { if (beanDiscoveryMode == null) { BeansXml beansXml = getBeansXml(); beanDiscoveryMode = BeanDiscoveryMode.ANNOTATED; if (beansXml != null) { beanDiscoveryMode = beansXml.getBeanDiscoveryMode(); } else if ((cdiRuntime.isImplicitBeanArchivesScanningDisabled(this.archive) || isExtension())) { // If the server.xml has the configuration of enableImplicitBeanArchives sets to false, we will not scan the implicit bean archives beanDiscoveryMode = BeanDiscoveryMode.NONE; } } return beanDiscoveryMode; } }
public class class_name {
    /**
     * Lazily resolves (and caches) the bean discovery mode: beans.xml value when
     * present; NONE when implicit scanning is disabled or this is an extension;
     * ANNOTATED otherwise. Pre-existing control/data-dependency annotations are
     * retained.
     */
    private BeanDiscoveryMode getBeanDiscoveryMode() {
        if (beanDiscoveryMode == null) {
            BeansXml beansXml = getBeansXml();
            // Default unless overridden below.
            beanDiscoveryMode = BeanDiscoveryMode.ANNOTATED; // depends on control dependency: [if], data = [none]
            if (beansXml != null) {
                beanDiscoveryMode = beansXml.getBeanDiscoveryMode(); // depends on control dependency: [if], data = [none]
            } else if ((cdiRuntime.isImplicitBeanArchivesScanningDisabled(this.archive) || isExtension())) {
                // If the server.xml has the configuration of enableImplicitBeanArchives sets to false, we will not scan the implicit bean archives
                beanDiscoveryMode = BeanDiscoveryMode.NONE; // depends on control dependency: [if], data = [none]
            }
        }
        return beanDiscoveryMode;
    }
}
public class class_name {
    /**
     * Registers the plugin under every renderer id it declares; ids are
     * ';'-separated in getRendererId(). A simple exact-match map is used for
     * lookup (a regex-based match may be needed in the future).
     */
    private void register(final AbstractPlugin plugin, final Map<String, HashSet<AbstractPlugin>> holder) {
        final String rendererId = plugin.getRendererId();
        for (final String rid : rendererId.split(";")) {
            // Create the bucket on first use, then add the plugin to it.
            holder.computeIfAbsent(rid, k -> new HashSet<>()).add(plugin);
        }
        LOGGER.log(Level.DEBUG,
                "Registered plugin[name={0}, version={1}] for rendererId[name={2}], [{3}] plugins totally",
                plugin.getName(), plugin.getVersion(), rendererId, holder.size());
    }
}
public class class_name {
    /**
     * Registers the plugin under every renderer id it declares (';'-separated).
     * Pre-existing control/data-dependency annotations are retained.
     */
    private void register(final AbstractPlugin plugin, final Map<String, HashSet<AbstractPlugin>> holder) {
        final String rendererId = plugin.getRendererId();
        /*
         * the rendererId support multiple,using ';' to split.
         * and using Map to match the plugin is not flexible, a regular expression match pattern may be needed in futrue.
         */
        final String[] redererIds = rendererId.split(";");
        for (final String rid : redererIds) {
            final HashSet<AbstractPlugin> set = holder.computeIfAbsent(rid, k -> new HashSet<>());
            set.add(plugin); // depends on control dependency: [for], data = [none]
        }
        LOGGER.log(Level.DEBUG,
                "Registered plugin[name={0}, version={1}] for rendererId[name={2}], [{3}] plugins totally",
                plugin.getName(), plugin.getVersion(), rendererId, holder.size());
    }
}
public class class_name { public synchronized void shutdown() { if (pool.getConfiguration().isBackgroundValidation() && pool.getConfiguration().getBackgroundValidationMillis() > 0) { ConnectionValidator.getInstance().unregisterPool(this); } if (pool.getConfiguration().getIdleTimeoutMinutes() > 0) { IdleConnectionRemover.getInstance().unregisterPool(this); } for (ConnectionListener cl : listeners) { if (cl.getState() == IN_USE) { // TODO } else if (cl.getState() == DESTROY) { // TODO } try { if (Tracer.isEnabled()) Tracer.clearConnectionListener(pool.getConfiguration().getId(), this, cl); pool.destroyConnectionListener(cl); } catch (ResourceException re) { // TODO cl.setState(ZOMBIE); } } listeners.clear(); } }
public class class_name {
    /**
     * Shuts this pool down: deregisters it from background validation and
     * idle-removal services, destroys every connection listener and clears the
     * listener list. Pre-existing control/data-dependency annotations are
     * retained.
     */
    public synchronized void shutdown() {
        if (pool.getConfiguration().isBackgroundValidation() && pool.getConfiguration().getBackgroundValidationMillis() > 0) {
            ConnectionValidator.getInstance().unregisterPool(this); // depends on control dependency: [if], data = [none]
        }
        if (pool.getConfiguration().getIdleTimeoutMinutes() > 0) {
            IdleConnectionRemover.getInstance().unregisterPool(this); // depends on control dependency: [if], data = [none]
        }
        for (ConnectionListener cl : listeners) {
            if (cl.getState() == IN_USE) {
                // TODO
            } else if (cl.getState() == DESTROY) {
                // TODO
            }
            try {
                if (Tracer.isEnabled())
                    Tracer.clearConnectionListener(pool.getConfiguration().getId(), this, cl);
                pool.destroyConnectionListener(cl); // depends on control dependency: [try], data = [none]
            } catch (ResourceException re) {
                // TODO
                // Destruction failed: mark the listener as dead.
                cl.setState(ZOMBIE);
            } // depends on control dependency: [catch], data = [none]
        }
        listeners.clear();
    }
}
public class class_name { public void saveAliases(CmsDbContext dbc, CmsProject project, CmsUUID structureId, List<CmsAlias> aliases) throws CmsException { for (CmsAlias alias : aliases) { if (!structureId.equals(alias.getStructureId())) { throw new IllegalArgumentException("Aliases to replace must have the same structure id!"); } } I_CmsVfsDriver vfsDriver = getVfsDriver(dbc); vfsDriver.deleteAliases(dbc, project, new CmsAliasFilter(null, null, structureId)); for (CmsAlias alias : aliases) { String aliasPath = alias.getAliasPath(); if (CmsAlias.ALIAS_PATTERN.matcher(aliasPath).matches()) { vfsDriver.insertAlias(dbc, project, alias); } else { LOG.error("Invalid alias path: " + aliasPath); } } } }
public class class_name {
    /**
     * Replaces all aliases of the given structure id: validates each new alias,
     * deletes the old ones, inserts valid new ones (invalid paths are logged and
     * skipped). Pre-existing control/data-dependency annotations are retained.
     *
     * @throws CmsException on driver failures
     */
    public void saveAliases(CmsDbContext dbc, CmsProject project, CmsUUID structureId, List<CmsAlias> aliases)
    throws CmsException {
        for (CmsAlias alias : aliases) {
            if (!structureId.equals(alias.getStructureId())) {
                throw new IllegalArgumentException("Aliases to replace must have the same structure id!");
            }
        }
        I_CmsVfsDriver vfsDriver = getVfsDriver(dbc);
        vfsDriver.deleteAliases(dbc, project, new CmsAliasFilter(null, null, structureId));
        for (CmsAlias alias : aliases) {
            String aliasPath = alias.getAliasPath();
            if (CmsAlias.ALIAS_PATTERN.matcher(aliasPath).matches()) {
                vfsDriver.insertAlias(dbc, project, alias); // depends on control dependency: [if], data = [none]
            } else {
                LOG.error("Invalid alias path: " + aliasPath); // depends on control dependency: [if], data = [none]
            }
        }
    }
}
public class class_name { protected final JobSchedulerBuilder scheduleJob(Class<? extends Job> jobClass) { checkNotNull(jobClass, "Argument 'jobClass' must be not null."); if (!RequireUtil.allowClass(getSettings(), jobClass)) { return null; } JobSchedulerBuilder builder = new JobSchedulerBuilder(jobClass); if (jobClass.isAnnotationPresent(Scheduled.class)) { Scheduled scheduled = jobClass.getAnnotation(Scheduled.class); builder // job .withJobName(scheduled.jobName()) .withJobGroup(scheduled.jobGroup()) .withRequestRecovery(scheduled.requestRecovery()) .withStoreDurably(scheduled.storeDurably()) // trigger .withCronExpression(scheduled.cronExpression()) .withTriggerName(scheduled.triggerName()); if (!Scheduled.DEFAULT.equals(scheduled.timeZoneId())) { TimeZone timeZone = TimeZone.getTimeZone(scheduled.timeZoneId()); if (timeZone != null) { builder.withTimeZone(timeZone); } } } requestInjection(builder); return builder; } }
public class class_name {
    /**
     * Creates a JobSchedulerBuilder for the given job class, pre-populated from
     * its @Scheduled annotation when present; returns null when the class is
     * filtered out by settings. Pre-existing control/data-dependency annotations
     * are retained.
     */
    protected final JobSchedulerBuilder scheduleJob(Class<? extends Job> jobClass) {
        checkNotNull(jobClass, "Argument 'jobClass' must be not null.");
        if (!RequireUtil.allowClass(getSettings(), jobClass)) {
            return null; // depends on control dependency: [if], data = [none]
        }
        JobSchedulerBuilder builder = new JobSchedulerBuilder(jobClass);
        if (jobClass.isAnnotationPresent(Scheduled.class)) {
            Scheduled scheduled = jobClass.getAnnotation(Scheduled.class);
            builder
                    // job
                    .withJobName(scheduled.jobName())
                    .withJobGroup(scheduled.jobGroup())
                    .withRequestRecovery(scheduled.requestRecovery())
                    .withStoreDurably(scheduled.storeDurably())
                    // trigger
                    .withCronExpression(scheduled.cronExpression())
                    .withTriggerName(scheduled.triggerName()); // depends on control dependency: [if], data = [none]
            if (!Scheduled.DEFAULT.equals(scheduled.timeZoneId())) {
                TimeZone timeZone = TimeZone.getTimeZone(scheduled.timeZoneId());
                if (timeZone != null) {
                    builder.withTimeZone(timeZone); // depends on control dependency: [if], data = [(timeZone]
                }
            }
        }
        requestInjection(builder);
        return builder;
    }
}
public class class_name {
    /**
     * Best-effort retrieval of the driver's current URL; returns "n/a" when the
     * driver is null or getCurrentUrl() throws (e.g. non-web mobile apps).
     */
    static String saveGetLocation(WebDriver driver) {
        logger.entering(driver);
        String location = "n/a";
        if (driver != null) {
            try {
                location = driver.getCurrentUrl();
            } catch (Exception exception) {
                logger.log(Level.FINER,
                        "Current location couldn't be retrieved by getCurrentUrl(). This can be SAFELY "
                                + "IGNORED if testing a non-web mobile application. Reason: ", exception);
            }
        }
        logger.exiting(location);
        return location;
    }
}
public class class_name {
    /**
     * Best-effort retrieval of the driver's current URL; returns "n/a" when the
     * driver is null or getCurrentUrl() throws. Pre-existing control/data-
     * dependency annotations are retained.
     */
    static String saveGetLocation(WebDriver driver) {
        logger.entering(driver);
        String location = "n/a";
        try {
            if (driver != null) {
                location = driver.getCurrentUrl(); // depends on control dependency: [if], data = [none]
            }
        } catch (Exception exception) {
            logger.log(Level.FINER,
                    "Current location couldn't be retrieved by getCurrentUrl(). This can be SAFELY "
                            + "IGNORED if testing a non-web mobile application. Reason: ", exception);
        } // depends on control dependency: [catch], data = [none]
        logger.exiting(location);
        return location;
    }
}
public class class_name {
    /**
     * Logs a FATAL-level message with an associated throwable, provided the
     * FATAL level is currently enabled.
     */
    public final void fatal(Object message, Throwable t) {
        if (!isLevelEnabled(SimpleLog.LOG_LEVEL_FATAL)) {
            return; // fatal logging disabled
        }
        log(SimpleLog.LOG_LEVEL_FATAL, message, t);
    }
}
public class class_name {
    /**
     * Logs a FATAL-level message with an associated throwable, provided the
     * FATAL level is currently enabled. Pre-existing control/data-dependency
     * annotations are retained.
     */
    public final void fatal(Object message, Throwable t) {
        if (isLevelEnabled(SimpleLog.LOG_LEVEL_FATAL)) {
            log(SimpleLog.LOG_LEVEL_FATAL, message, t); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name { public static boolean matches(String text, String pattern) { if (text == null) { throw new IllegalArgumentException("text cannot be null"); } text += '\0'; pattern += '\0'; int N = pattern.length(); boolean[] states = new boolean[N + 1]; boolean[] old = new boolean[N + 1]; old[0] = true; for (int i = 0; i < text.length(); i++) { char c = text.charAt(i); states = new boolean[N + 1]; // initialized to false for (int j = 0; j < N; j++) { char p = pattern.charAt(j); // hack to handle *'s that match 0 characters if (old[j] && (p == WILDCARD)) old[j + 1] = true; if (old[j] && (p == c)) states[j + 1] = true; if (old[j] && (p == WILDCARD)) states[j] = true; if (old[j] && (p == WILDCARD)) states[j + 1] = true; } old = states; } return states[N]; } }
public class class_name {
    /**
     * NFA-style wildcard matcher: true when text matches pattern, where WILDCARD
     * matches any run of zero or more characters. NOTE(review): a null pattern
     * is silently concatenated into the literal "null" rather than rejected.
     * Pre-existing control/data-dependency annotations are retained.
     */
    public static boolean matches(String text, String pattern) {
        if (text == null) {
            throw new IllegalArgumentException("text cannot be null");
        }
        // Sentinel terminators so states[N] means "pattern consumed at end of text".
        text += '\0';
        pattern += '\0';
        int N = pattern.length();
        boolean[] states = new boolean[N + 1];
        boolean[] old = new boolean[N + 1];
        old[0] = true;
        for (int i = 0; i < text.length(); i++) {
            char c = text.charAt(i);
            states = new boolean[N + 1]; // initialized to false // depends on control dependency: [for], data = [none]
            for (int j = 0; j < N; j++) {
                char p = pattern.charAt(j);
                // hack to handle *'s that match 0 characters
                if (old[j] && (p == WILDCARD)) old[j + 1] = true;
                if (old[j] && (p == c)) states[j + 1] = true;
                if (old[j] && (p == WILDCARD)) states[j] = true;
                if (old[j] && (p == WILDCARD)) states[j + 1] = true;
            }
            old = states; // depends on control dependency: [for], data = [none]
        }
        return states[N];
    }
}
public class class_name {
    /**
     * Removes and returns the last node of the doubly-linked list.
     * Assumes the list is non-empty (dereferences {@code last}).
     */
    E unlinkLast() {
        final E removed = last;
        final E newLast = removed.getPrevious();
        removed.setPrevious(null); // detach the node
        last = newLast;
        if (newLast != null) {
            newLast.setNext(null);
        } else {
            first = null; // list is now empty
        }
        return removed;
    }
}
public class class_name {
    /**
     * Removes and returns the last node of the doubly-linked list; assumes the
     * list is non-empty. Pre-existing control/data-dependency annotations are
     * retained.
     */
    E unlinkLast() {
        final E l = last;
        final E prev = l.getPrevious();
        l.setPrevious(null);
        last = prev;
        if (prev == null) {
            first = null; // depends on control dependency: [if], data = [none]
        } else {
            prev.setNext(null); // depends on control dependency: [if], data = [null)]
        }
        return l;
    }
}
public class class_name {
    /**
     * Computes the CRC64 checksum of a sequence by folding in each compound's
     * short name, returned in the checksum's string form.
     */
    public static <C extends Compound> String checksum(Sequence<C> sequence) {
        final CRC64Checksum crc = new CRC64Checksum();
        for (final C compound : sequence) {
            crc.update(compound.getShortName());
        }
        return crc.toString();
    }
}
public class class_name {
    /**
     * Computes the CRC64 checksum of a sequence by folding in each compound's
     * short name. Pre-existing control/data-dependency annotations are retained.
     */
    public static <C extends Compound> String checksum(Sequence<C> sequence) {
        CRC64Checksum checksum = new CRC64Checksum();
        for (C compound : sequence) {
            checksum.update(compound.getShortName()); // depends on control dependency: [for], data = [compound]
        }
        return checksum.toString();
    }
}
public class class_name {
    /**
     * Strips the namespace bound to {@code prefix} out of {@code uri}, yielding
     * the local name.
     *
     * @throws IllegalArgumentException if no namespace is bound to the prefix
     */
    public static String getLocalName(final String uri, final String prefix) {
        final String namespace = getNSFromPrefix(prefix);
        if (namespace == null) {
            throw new IllegalArgumentException("Undefined prefix (" + prefix + ") in URI: " + uri);
        }
        return uri.replace(namespace, "");
    }
}
public class class_name {
    /**
     * Strips the namespace bound to {@code prefix} out of {@code uri}, yielding
     * the local name. Pre-existing control/data-dependency annotations are
     * retained.
     *
     * @throws IllegalArgumentException if no namespace is bound to the prefix
     */
    public static String getLocalName(final String uri, final String prefix) {
        String ns = getNSFromPrefix(prefix);
        if (ns != null) {
            return uri.replace(ns, ""); // depends on control dependency: [if], data = [(ns]
        }
        throw new IllegalArgumentException("Undefined prefix (" + prefix + ") in URI: " + uri);
    }
}
public class class_name {
    /**
     * Resolves every value resolver against the given context, producing the
     * argument array for a method invocation. Returns the shared empty array
     * when there is nothing to resolve.
     */
    static Object[] toArguments(List<AnnotatedValueResolver> resolvers, ResolverContext resolverContext) {
        requireNonNull(resolvers, "resolvers");
        requireNonNull(resolverContext, "resolverContext");
        if (!resolvers.isEmpty()) {
            return resolvers.stream()
                            .map(r -> r.resolve(resolverContext))
                            .toArray();
        }
        return emptyArguments;
    }
}
public class class_name {
    /**
     * Resolves every value resolver against the given context into an argument
     * array; returns the shared empty array when there is nothing to resolve.
     * Pre-existing control/data-dependency annotations are retained.
     */
    static Object[] toArguments(List<AnnotatedValueResolver> resolvers, ResolverContext resolverContext) {
        requireNonNull(resolvers, "resolvers");
        requireNonNull(resolverContext, "resolverContext");
        if (resolvers.isEmpty()) {
            return emptyArguments; // depends on control dependency: [if], data = [none]
        }
        return resolvers.stream().map(resolver -> resolver.resolve(resolverContext)).toArray();
    }
}
public class class_name { public static double bachelierOptionValue( double forward, double volatility, double optionMaturity, double optionStrike, double payoffUnit) { if(optionMaturity < 0) { return 0; } else if(forward == optionStrike) { return volatility * Math.sqrt(optionMaturity / Math.PI / 2.0) * payoffUnit; } else { // Calculate analytic value double dPlus = (forward - optionStrike) / (volatility * Math.sqrt(optionMaturity)); double valueAnalytic = ((forward - optionStrike) * NormalDistribution.cumulativeDistribution(dPlus) + volatility * Math.sqrt(optionMaturity) * NormalDistribution.density(dPlus)) * payoffUnit; return valueAnalytic; } } }
public class class_name {
    /**
     * Analytic option value under the Bachelier (normal) model, scaled by the
     * payoff unit; 0 for expired options, closed-form ATM value when forward
     * equals strike. Pre-existing control/data-dependency annotations are
     * retained.
     */
    public static double bachelierOptionValue(
            double forward,
            double volatility,
            double optionMaturity,
            double optionStrike,
            double payoffUnit) {
        if(optionMaturity < 0) {
            return 0; // depends on control dependency: [if], data = [none]
        }
        else if(forward == optionStrike) {
            return volatility * Math.sqrt(optionMaturity / Math.PI / 2.0) * payoffUnit; // depends on control dependency: [if], data = [none]
        }
        else {
            // Calculate analytic value
            double dPlus = (forward - optionStrike) / (volatility * Math.sqrt(optionMaturity));
            double valueAnalytic = ((forward - optionStrike) * NormalDistribution.cumulativeDistribution(dPlus)
                    + volatility * Math.sqrt(optionMaturity) * NormalDistribution.density(dPlus)) * payoffUnit;
            return valueAnalytic; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Formats a millisecond duration as a compact human-readable interval,
     * e.g. {@code "1day 1h 1m 1s 1ms"} or {@code "2days 0h 0m 0s 0ms"}.
     * Units larger than the duration are omitted, but once a unit is printed
     * every smaller unit is printed too; milliseconds always appear.
     *
     * @param pMilliseconds duration in milliseconds (expected non-negative;
     *                      negative values are printed verbatim as "...ms")
     * @return the formatted interval, always ending in "ms"
     */
    public static String getTimeInterval(final long pMilliseconds) {
        final long MILLISECONDS_IN_SECOND = 1000;
        final long MILLISECONDS_IN_MINUTE = 60 * MILLISECONDS_IN_SECOND; // 60000
        final long MILLISECONDS_IN_HOUR = 60 * MILLISECONDS_IN_MINUTE; // 3600000
        final long MILLISECONDS_IN_DAY = 24 * MILLISECONDS_IN_HOUR; // 86400000

        // Extract each unit only when the remainder reaches it, exactly as the
        // original did (this also leaves negative inputs untouched).
        long remaining = pMilliseconds;
        long days = 0;
        long hours = 0;
        long minutes = 0;
        long seconds = 0;
        if (remaining >= MILLISECONDS_IN_DAY) {
            days = remaining / MILLISECONDS_IN_DAY;
            remaining %= MILLISECONDS_IN_DAY;
        }
        if (remaining >= MILLISECONDS_IN_HOUR) {
            hours = remaining / MILLISECONDS_IN_HOUR;
            remaining %= MILLISECONDS_IN_HOUR;
        }
        if (remaining >= MILLISECONDS_IN_MINUTE) {
            minutes = remaining / MILLISECONDS_IN_MINUTE;
            remaining %= MILLISECONDS_IN_MINUTE;
        }
        if (remaining >= MILLISECONDS_IN_SECOND) {
            seconds = remaining / MILLISECONDS_IN_SECOND;
            remaining %= MILLISECONDS_IN_SECOND;
        }

        // Cascade: printing a unit forces all smaller units to print as well.
        // (Replaces the original's hand-maintained flag cascade and its dead,
        // always-true printMilliseconds flag.)
        final boolean printDays = days > 0;
        final boolean printHours = printDays || hours > 0;
        final boolean printMinutes = printHours || minutes > 0;
        final boolean printSeconds = printMinutes || seconds > 0;

        final StringBuilder out = new StringBuilder();
        if (printDays) {
            out.append(days).append(days > 1 ? "days " : "day ");
        }
        if (printHours) {
            out.append(hours).append("h ");
        }
        if (printMinutes) {
            out.append(minutes).append("m ");
        }
        if (printSeconds) {
            out.append(seconds).append("s ");
        }
        out.append(remaining).append("ms"); // milliseconds are always printed
        return out.toString();
    }
}
public class class_name {
    /**
     * Formats a millisecond duration as a compact human-readable interval,
     * e.g. "1day 1h 1m 1s 1ms". Units larger than the duration are omitted,
     * but once a unit prints every smaller unit prints too; milliseconds always
     * appear (printMilliseconds is never set to false). Pre-existing
     * control/data-dependency annotations are retained.
     *
     * @param pMilliseconds duration in milliseconds (expected non-negative)
     * @return the formatted interval, always ending in "ms"
     */
    public static String getTimeInterval(final long pMilliseconds) {
        long timeIntervalMilliseconds = pMilliseconds;
        long timeIntervalSeconds = 0;
        long timeIntervalMinutes = 0;
        long timeIntervalHours = 0;
        long timeIntervalDays = 0;
        boolean printMilliseconds = true;
        boolean printSeconds = false;
        boolean printMinutes = false;
        boolean printHours = false;
        boolean printDays = false;
        final long MILLISECONDS_IN_SECOND = 1000;
        final long MILLISECONDS_IN_MINUTE = 60 * MILLISECONDS_IN_SECOND; // 60000
        final long MILLISECONDS_IN_HOUR = 60 * MILLISECONDS_IN_MINUTE; // 3600000
        final long MILLISECONDS_IN_DAY = 24 * MILLISECONDS_IN_HOUR; // 86400000
        StringBuilder timeIntervalBuffer = new StringBuilder();
        // Days
        if (timeIntervalMilliseconds >= MILLISECONDS_IN_DAY) {
            timeIntervalDays = timeIntervalMilliseconds / MILLISECONDS_IN_DAY; // depends on control dependency: [if], data = [none]
            timeIntervalMilliseconds = timeIntervalMilliseconds % MILLISECONDS_IN_DAY; // depends on control dependency: [if], data = [none]
            printDays = true; // depends on control dependency: [if], data = [none]
            printHours = true; // depends on control dependency: [if], data = [none]
            printMinutes = true; // depends on control dependency: [if], data = [none]
            printSeconds = true; // depends on control dependency: [if], data = [none]
        }
        // Hours
        if (timeIntervalMilliseconds >= MILLISECONDS_IN_HOUR) {
            timeIntervalHours = timeIntervalMilliseconds / MILLISECONDS_IN_HOUR; // depends on control dependency: [if], data = [none]
            timeIntervalMilliseconds = timeIntervalMilliseconds % MILLISECONDS_IN_HOUR; // depends on control dependency: [if], data = [none]
            printHours = true; // depends on control dependency: [if], data = [none]
            printMinutes = true; // depends on control dependency: [if], data = [none]
            printSeconds = true; // depends on control dependency: [if], data = [none]
        }
        // Minutes
        if (timeIntervalMilliseconds >= MILLISECONDS_IN_MINUTE) {
            timeIntervalMinutes = timeIntervalMilliseconds / MILLISECONDS_IN_MINUTE; // depends on control dependency: [if], data = [none]
            timeIntervalMilliseconds = timeIntervalMilliseconds % MILLISECONDS_IN_MINUTE; // depends on control dependency: [if], data = [none]
            printMinutes = true; // depends on control dependency: [if], data = [none]
            printSeconds = true; // depends on control dependency: [if], data = [none]
        }
        // Seconds
        if (timeIntervalMilliseconds >= MILLISECONDS_IN_SECOND) {
            timeIntervalSeconds = timeIntervalMilliseconds / MILLISECONDS_IN_SECOND; // depends on control dependency: [if], data = [none]
            timeIntervalMilliseconds = timeIntervalMilliseconds % MILLISECONDS_IN_SECOND; // depends on control dependency: [if], data = [none]
            printSeconds = true; // depends on control dependency: [if], data = [none]
        }
        // Prettyprint
        if (printDays) {
            timeIntervalBuffer.append(timeIntervalDays); // depends on control dependency: [if], data = [none]
            if (timeIntervalDays > 1) {
                timeIntervalBuffer.append("days "); // depends on control dependency: [if], data = [none]
            } else {
                timeIntervalBuffer.append("day "); // depends on control dependency: [if], data = [none]
            }
        }
        if (printHours) {
            timeIntervalBuffer.append(timeIntervalHours); // depends on control dependency: [if], data = [none]
            timeIntervalBuffer.append("h "); // depends on control dependency: [if], data = [none]
        }
        if (printMinutes) {
            timeIntervalBuffer.append(timeIntervalMinutes); // depends on control dependency: [if], data = [none]
            timeIntervalBuffer.append("m "); // depends on control dependency: [if], data = [none]
        }
        if (printSeconds) {
            timeIntervalBuffer.append(timeIntervalSeconds); // depends on control dependency: [if], data = [none]
            timeIntervalBuffer.append("s "); // depends on control dependency: [if], data = [none]
        }
        if (printMilliseconds) {
            timeIntervalBuffer.append(timeIntervalMilliseconds); // depends on control dependency: [if], data = [none]
            timeIntervalBuffer.append("ms"); // depends on control dependency: [if], data = [none]
        }
        return timeIntervalBuffer.toString();
    }
}
public class class_name { private int setOptionFlag(String[] args, int i) { if (args[i].equalsIgnoreCase("-PCFG")) { doDep = false; doPCFG = true; i++; } else if (args[i].equalsIgnoreCase("-dep")) { doDep = true; doPCFG = false; i++; } else if (args[i].equalsIgnoreCase("-factored")) { doDep = true; doPCFG = true; testOptions.useFastFactored = false; i++; } else if (args[i].equalsIgnoreCase("-fastFactored")) { doDep = true; doPCFG = true; testOptions.useFastFactored = true; i++; } else if (args[i].equalsIgnoreCase("-noRecoveryTagging")) { testOptions.noRecoveryTagging = true; i++; } else if (args[i].equalsIgnoreCase("-useLexiconToScoreDependencyPwGt")) { testOptions.useLexiconToScoreDependencyPwGt = true; i++; } else if (args[i].equalsIgnoreCase("-useSmoothTagProjection")) { useSmoothTagProjection = true; i++; } else if (args[i].equalsIgnoreCase("-useUnigramWordSmoothing")) { useUnigramWordSmoothing = true; i++; } else if (args[i].equalsIgnoreCase("-useNonProjectiveDependencyParser")) { testOptions.useNonProjectiveDependencyParser = true; i++; } else if (args[i].equalsIgnoreCase("-maxLength") && (i + 1 < args.length)) { testOptions.maxLength = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-MAX_ITEMS") && (i + 1 < args.length)) { testOptions.MAX_ITEMS = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-trainLength") && (i + 1 < args.length)) { // train on only short sentences trainOptions.trainLengthLimit = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-lengthNormalization")) { testOptions.lengthNormalization = true; i++; } else if (args[i].equalsIgnoreCase("-iterativeCKY")) { testOptions.iterativeCKY = true; i++; } else if (args[i].equalsIgnoreCase("-vMarkov") && (i + 1 < args.length)) { int order = Integer.parseInt(args[i + 1]); if (order <= 1) { trainOptions.PA = false; trainOptions.gPA = false; } else if (order == 2) { trainOptions.PA = true; trainOptions.gPA = false; } else if 
(order >= 3) { trainOptions.PA = true; trainOptions.gPA = true; } i += 2; } else if (args[i].equalsIgnoreCase("-vSelSplitCutOff") && (i + 1 < args.length)) { trainOptions.selectiveSplitCutOff = Double.parseDouble(args[i + 1]); trainOptions.selectiveSplit = trainOptions.selectiveSplitCutOff > 0.0; i += 2; } else if (args[i].equalsIgnoreCase("-vSelPostSplitCutOff") && (i + 1 < args.length)) { trainOptions.selectivePostSplitCutOff = Double.parseDouble(args[i + 1]); trainOptions.selectivePostSplit = trainOptions.selectivePostSplitCutOff > 0.0; i += 2; } else if (args[i].equalsIgnoreCase("-deleteSplitters") && (i+1 < args.length)) { String[] toDel = args[i+1].split(" *, *"); trainOptions.deleteSplitters = new HashSet<String>(Arrays.asList(toDel)); i += 2; } else if (args[i].equalsIgnoreCase("-postSplitWithBaseCategory")) { trainOptions.postSplitWithBaseCategory = true; i += 1; } else if (args[i].equalsIgnoreCase("-vPostMarkov") && (i + 1 < args.length)) { int order = Integer.parseInt(args[i + 1]); if (order <= 1) { trainOptions.postPA = false; trainOptions.postGPA = false; } else if (order == 2) { trainOptions.postPA = true; trainOptions.postGPA = false; } else if (order >= 3) { trainOptions.postPA = true; trainOptions.postGPA = true; } i += 2; } else if (args[i].equalsIgnoreCase("-hMarkov") && (i + 1 < args.length)) { int order = Integer.parseInt(args[i + 1]); if (order >= 0) { trainOptions.markovOrder = order; trainOptions.markovFactor = true; } else { trainOptions.markovFactor = false; } i += 2; } else if (args[i].equalsIgnoreCase("-distanceBins") && (i + 1 < args.length)) { int numBins = Integer.parseInt(args[i + 1]); if (numBins <= 1) { distance = false; } else if (numBins == 4) { distance = true; coarseDistance = true; } else if (numBins == 5) { distance = true; coarseDistance = false; } else { throw new IllegalArgumentException("Invalid value for -distanceBin: " + args[i+1]); } i += 2; } else if (args[i].equalsIgnoreCase("-noStop")) { genStop = false; i++; } else 
if (args[i].equalsIgnoreCase("-nonDirectional")) { directional = false; i++; } else if (args[i].equalsIgnoreCase("-depWeight") && (i + 1 < args.length)) { testOptions.depWeight = Double.parseDouble(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-printPCFGkBest") && (i + 1 < args.length)) { testOptions.printPCFGkBest = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-printFactoredKGood") && (i + 1 < args.length)) { testOptions.printFactoredKGood = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-smoothTagsThresh") && (i + 1 < args.length)) { lexOptions.smoothInUnknownsThreshold = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-unseenSmooth") && (i + 1 < args.length)) { testOptions.unseenSmooth = Double.parseDouble(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-fractionBeforeUnseenCounting") && (i + 1 < args.length)) { trainOptions.fractionBeforeUnseenCounting = Double.parseDouble(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-hSelSplitThresh") && (i + 1 < args.length)) { trainOptions.HSEL_CUT = Integer.parseInt(args[i + 1]); trainOptions.hSelSplit = trainOptions.HSEL_CUT > 0; i += 2; } else if (args[i].equalsIgnoreCase("-tagPA")) { trainOptions.tagPA = true; i += 1; } else if (args[i].equalsIgnoreCase("-tagSelSplitCutOff") && (i + 1 < args.length)) { trainOptions.tagSelectiveSplitCutOff = Double.parseDouble(args[i + 1]); trainOptions.tagSelectiveSplit = trainOptions.tagSelectiveSplitCutOff > 0.0; i += 2; } else if (args[i].equalsIgnoreCase("-tagSelPostSplitCutOff") && (i + 1 < args.length)) { trainOptions.tagSelectivePostSplitCutOff = Double.parseDouble(args[i + 1]); trainOptions.tagSelectivePostSplit = trainOptions.tagSelectivePostSplitCutOff > 0.0; i += 2; } else if (args[i].equalsIgnoreCase("-noTagSplit")) { trainOptions.noTagSplit = true; i += 1; } else if (args[i].equalsIgnoreCase("-uwm") && (i + 1 < args.length)) { 
lexOptions.useUnknownWordSignatures = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-unknownSuffixSize") && (i + 1 < args.length)) { lexOptions.unknownSuffixSize = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-unknownPrefixSize") && (i + 1 < args.length)) { lexOptions.unknownPrefixSize = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-uwModelTrainer") && (i + 1 < args.length)) { lexOptions.uwModelTrainer = args[i+1]; i += 2; } else if (args[i].equalsIgnoreCase("-openClassThreshold") && (i + 1 < args.length)) { trainOptions.openClassTypesThreshold = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-unary") && i+1 < args.length) { trainOptions.markUnary = Integer.parseInt(args[i+1]); i += 2; } else if (args[i].equalsIgnoreCase("-unaryTags")) { trainOptions.markUnaryTags = true; i += 1; } else if (args[i].equalsIgnoreCase("-mutate")) { lexOptions.smartMutation = true; i += 1; } else if (args[i].equalsIgnoreCase("-useUnicodeType")) { lexOptions.useUnicodeType = true; i += 1; } else if (args[i].equalsIgnoreCase("-rightRec")) { trainOptions.rightRec = true; i += 1; } else if (args[i].equalsIgnoreCase("-noRightRec")) { trainOptions.rightRec = false; i += 1; } else if (args[i].equalsIgnoreCase("-preTag")) { testOptions.preTag = true; i += 1; } else if (args[i].equalsIgnoreCase("-forceTags")) { testOptions.forceTags = true; i += 1; } else if (args[i].equalsIgnoreCase("-taggerSerializedFile")) { testOptions.taggerSerializedFile = args[i+1]; i += 2; } else if (args[i].equalsIgnoreCase("-forceTagBeginnings")) { testOptions.forceTagBeginnings = true; i += 1; } else if (args[i].equalsIgnoreCase("-noFunctionalForcing")) { testOptions.noFunctionalForcing = true; i += 1; } else if (args[i].equalsIgnoreCase("-scTags")) { dcTags = false; i += 1; } else if (args[i].equalsIgnoreCase("-dcTags")) { dcTags = true; i += 1; } else if 
(args[i].equalsIgnoreCase("-basicCategoryTagsInDependencyGrammar")) { trainOptions.basicCategoryTagsInDependencyGrammar = true; i+= 1; } else if (args[i].equalsIgnoreCase("-evalb")) { testOptions.evalb = true; i += 1; } else if (args[i].equalsIgnoreCase("-v") || args[i].equalsIgnoreCase("-verbose")) { testOptions.verbose = true; i += 1; } else if (args[i].equalsIgnoreCase("-outputFilesDirectory") && i+1 < args.length) { testOptions.outputFilesDirectory = args[i+1]; i += 2; } else if (args[i].equalsIgnoreCase("-outputFilesExtension") && i+1 < args.length) { testOptions.outputFilesExtension = args[i+1]; i += 2; } else if (args[i].equalsIgnoreCase("-outputFilesPrefix") && i+1 < args.length) { testOptions.outputFilesPrefix = args[i+1]; i += 2; } else if (args[i].equalsIgnoreCase("-outputkBestEquivocation") && i+1 < args.length) { testOptions.outputkBestEquivocation = args[i+1]; i += 2; } else if (args[i].equalsIgnoreCase("-writeOutputFiles")) { testOptions.writeOutputFiles = true; i += 1; } else if (args[i].equalsIgnoreCase("-printAllBestParses")) { testOptions.printAllBestParses = true; i += 1; } else if (args[i].equalsIgnoreCase("-outputTreeFormat") || args[i].equalsIgnoreCase("-outputFormat")) { testOptions.outputFormat = args[i + 1]; i += 2; } else if (args[i].equalsIgnoreCase("-outputTreeFormatOptions") || args[i].equalsIgnoreCase("-outputFormatOptions")) { testOptions.outputFormatOptions = args[i + 1]; i += 2; } else if (args[i].equalsIgnoreCase("-addMissingFinalPunctuation")) { testOptions.addMissingFinalPunctuation = true; i += 1; } else if (args[i].equalsIgnoreCase("-flexiTag")) { lexOptions.flexiTag = true; i += 1; } else if (args[i].equalsIgnoreCase("-lexiTag")) { lexOptions.flexiTag = false; i += 1; } else if (args[i].equalsIgnoreCase("-useSignatureForKnownSmoothing")) { lexOptions.useSignatureForKnownSmoothing = true; i += 1; } else if (args[i].equalsIgnoreCase("-compactGrammar")) { trainOptions.compactGrammar = Integer.parseInt(args[i + 1]); i += 2; } 
else if (args[i].equalsIgnoreCase("-markFinalStates")) { trainOptions.markFinalStates = args[i + 1].equalsIgnoreCase("true"); i += 2; } else if (args[i].equalsIgnoreCase("-leftToRight")) { trainOptions.leftToRight = args[i + 1].equals("true"); i += 2; } else if (args[i].equalsIgnoreCase("-cnf")) { forceCNF = true; i += 1; } else if(args[i].equalsIgnoreCase("-smoothRules")) { trainOptions.ruleSmoothing = true; trainOptions.ruleSmoothingAlpha = Double.valueOf(args[i+1]); i += 2; } else if (args[i].equalsIgnoreCase("-nodePrune") && i+1 < args.length) { nodePrune = args[i+1].equalsIgnoreCase("true"); i += 2; } else if (args[i].equalsIgnoreCase("-noDoRecovery")) { testOptions.doRecovery = false; i += 1; } else if (args[i].equalsIgnoreCase("-acl03chinese")) { trainOptions.markovOrder = 1; trainOptions.markovFactor = true; // no increment } else if (args[i].equalsIgnoreCase("-wordFunction")) { wordFunction = ReflectionLoading.loadByReflection(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-acl03pcfg")) { doDep = false; doPCFG = true; // lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; trainOptions.PA = true; trainOptions.gPA = false; trainOptions.tagPA = true; trainOptions.tagSelectiveSplit = false; trainOptions.rightRec = true; trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = true; // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-jenny")) { doDep = false; doPCFG = true; // lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; trainOptions.PA = false; trainOptions.gPA = false; trainOptions.tagPA = false; trainOptions.tagSelectiveSplit = false; trainOptions.rightRec = true; trainOptions.selectiveSplit = false; // 
trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = false; // trainOptions.markovOrder = 2; trainOptions.hSelSplit = false; lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = true; // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-goodPCFG")) { doDep = false; doPCFG = true; // op.lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; trainOptions.PA = true; trainOptions.gPA = false; trainOptions.tagPA = true; trainOptions.tagSelectiveSplit = false; trainOptions.rightRec = true; trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = true; // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; String[] delSplit = new String[] { "-deleteSplitters", "VP^NP,VP^VP,VP^SINV,VP^SQ" }; if (this.setOptionFlag(delSplit, 0) != 2) { System.err.println("Error processing deleteSplitters"); } // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-linguisticPCFG")) { doDep = false; doPCFG = true; // op.lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; trainOptions.PA = true; trainOptions.gPA = false; trainOptions.tagPA = true; // on at the moment, but iffy trainOptions.tagSelectiveSplit = false; trainOptions.rightRec = false; // not for linguistic trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; lexOptions.useUnknownWordSignatures = 5; // different from acl03pcfg lexOptions.flexiTag = false; // different from acl03pcfg // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // don't increment i so it gets 
language specific stuff as well } else if (args[i].equalsIgnoreCase("-ijcai03")) { doDep = true; doPCFG = true; trainOptions.markUnary = 0; trainOptions.PA = true; trainOptions.gPA = false; trainOptions.tagPA = false; trainOptions.tagSelectiveSplit = false; trainOptions.rightRec = false; trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 300.0; trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; trainOptions.compactGrammar = 0; /// cdm: May 2005 compacting bad for factored? lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = false; dcTags = true; // op.nodePrune = true; // cdm: May 2005: this doesn't help // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-goodFactored")) { doDep = true; doPCFG = true; trainOptions.markUnary = 0; trainOptions.PA = true; trainOptions.gPA = false; trainOptions.tagPA = false; trainOptions.tagSelectiveSplit = false; trainOptions.rightRec = false; trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 300.0; trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; trainOptions.compactGrammar = 0; /// cdm: May 2005 compacting bad for factored? lexOptions.useUnknownWordSignatures = 5; // different from ijcai03 lexOptions.flexiTag = false; dcTags = true; // op.nodePrune = true; // cdm: May 2005: this doesn't help // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-chineseFactored")) { // Single counting tag->word rewrite is also much better for Chinese // Factored. Bracketing F1 goes up about 0.7%. 
dcTags = false; lexOptions.useUnicodeType = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; trainOptions.markovFactor = true; trainOptions.HSEL_CUT = 50; // trainOptions.openClassTypesThreshold=1; // so can get unseen punctuation // trainOptions.fractionBeforeUnseenCounting=0.0; // so can get unseen punctuation // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-arabicFactored")) { doDep = true; doPCFG = true; dcTags = false; // "false" seems to help Arabic about 0.1% F1 trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; trainOptions.HSEL_CUT = 75; // 75 bit better than 50, 100 a bit worse trainOptions.PA = true; trainOptions.gPA = false; trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 300.0; trainOptions.markUnary = 1; // Helps PCFG and marginally factLB // trainOptions.compactGrammar = 0; // Doesn't seem to help or only 0.05% F1 lexOptions.useUnknownWordSignatures = 9; lexOptions.unknownPrefixSize = 1; lexOptions.unknownSuffixSize = 1; testOptions.MAX_ITEMS = 500000; // Arabic sentences are long enough that this helps a fraction // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-frenchFactored")) { doDep = true; doPCFG = true; dcTags = false; //wsg2011: Setting to false improves F1 by 0.5% trainOptions.markovFactor = true; trainOptions.markovOrder = 2; trainOptions.hSelSplit = true; trainOptions.HSEL_CUT = 75; trainOptions.PA = true; trainOptions.gPA = false; trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 300.0; trainOptions.markUnary = 0; //Unary rule marking bad for french..setting to 0 gives +0.3 F1 lexOptions.useUnknownWordSignatures = 1; lexOptions.unknownPrefixSize = 1; lexOptions.unknownSuffixSize = 2; } else if (args[i].equalsIgnoreCase("-chinesePCFG")) { trainOptions.markovOrder = 2; trainOptions.markovFactor = true; trainOptions.HSEL_CUT = 5; 
trainOptions.PA = true; trainOptions.gPA = true; trainOptions.selectiveSplit = false; doDep = false; doPCFG = true; // Single counting tag->word rewrite is also much better for Chinese PCFG // Bracketing F1 is up about 2% and tag accuracy about 1% (exact by 6%) dcTags = false; // no increment } else if (args[i].equalsIgnoreCase("-printTT") && (i+1 < args.length)) { trainOptions.printTreeTransformations = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-printAnnotatedRuleCounts")) { trainOptions.printAnnotatedRuleCounts = true; i++; } else if (args[i].equalsIgnoreCase("-printAnnotatedStateCounts")) { trainOptions.printAnnotatedStateCounts = true; i++; } else if (args[i].equalsIgnoreCase("-printAnnotated") && (i + 1 < args.length)) { try { trainOptions.printAnnotatedPW = tlpParams.pw(new FileOutputStream(args[i + 1])); } catch (IOException ioe) { trainOptions.printAnnotatedPW = null; } i += 2; } else if (args[i].equalsIgnoreCase("-printBinarized") && (i + 1 < args.length)) { try { trainOptions.printBinarizedPW = tlpParams.pw(new FileOutputStream(args[i + 1])); } catch (IOException ioe) { trainOptions.printBinarizedPW = null; } i += 2; } else if (args[i].equalsIgnoreCase("-printStates")) { trainOptions.printStates = true; i++; } else if (args[i].equalsIgnoreCase("-preTransformer") && (i + 1 < args.length)) { String[] classes = args[i + 1].split(","); i += 2; if (classes.length == 1) { trainOptions.preTransformer = ReflectionLoading.loadByReflection(classes[0], this); } else if (classes.length > 1) { CompositeTreeTransformer composite = new CompositeTreeTransformer(); trainOptions.preTransformer = composite; for (String clazz : classes) { TreeTransformer transformer = ReflectionLoading.loadByReflection(clazz, this); composite.addTransformer(transformer); } } } else if (args[i].equalsIgnoreCase("-taggedFiles") && (i + 1 < args.length)) { trainOptions.taggedFiles = args[i + 1]; i += 2; } else if (args[i].equalsIgnoreCase("-evals")) { 
testOptions.evals = StringUtils.stringToProperties(args[i+1], testOptions.evals); i += 2; } else if (args[i].equalsIgnoreCase("-fastFactoredCandidateMultiplier")) { testOptions.fastFactoredCandidateMultiplier = Integer.parseInt(args[i + 1]); i += 2; } else if (args[i].equalsIgnoreCase("-fastFactoredCandidateAddend")) { testOptions.fastFactoredCandidateAddend = Integer.parseInt(args[i + 1]); i += 2; } return i; } }
public class class_name { private int setOptionFlag(String[] args, int i) { if (args[i].equalsIgnoreCase("-PCFG")) { doDep = false; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-dep")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = false; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-factored")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] testOptions.useFastFactored = false; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-fastFactored")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] testOptions.useFastFactored = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-noRecoveryTagging")) { testOptions.noRecoveryTagging = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-useLexiconToScoreDependencyPwGt")) { testOptions.useLexiconToScoreDependencyPwGt = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-useSmoothTagProjection")) { useSmoothTagProjection = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-useUnigramWordSmoothing")) { useUnigramWordSmoothing = true; 
// depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-useNonProjectiveDependencyParser")) { testOptions.useNonProjectiveDependencyParser = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-maxLength") && (i + 1 < args.length)) { testOptions.maxLength = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-MAX_ITEMS") && (i + 1 < args.length)) { testOptions.MAX_ITEMS = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-trainLength") && (i + 1 < args.length)) { // train on only short sentences trainOptions.trainLengthLimit = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-lengthNormalization")) { testOptions.lengthNormalization = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-iterativeCKY")) { testOptions.iterativeCKY = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-vMarkov") && (i + 1 < args.length)) { int order = Integer.parseInt(args[i + 1]); if (order <= 1) { trainOptions.PA = false; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] } else if (order == 2) { trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends 
on control dependency: [if], data = [none] } else if (order >= 3) { trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = true; // depends on control dependency: [if], data = [none] } i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-vSelSplitCutOff") && (i + 1 < args.length)) { trainOptions.selectiveSplitCutOff = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = trainOptions.selectiveSplitCutOff > 0.0; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-vSelPostSplitCutOff") && (i + 1 < args.length)) { trainOptions.selectivePostSplitCutOff = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] trainOptions.selectivePostSplit = trainOptions.selectivePostSplitCutOff > 0.0; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-deleteSplitters") && (i+1 < args.length)) { String[] toDel = args[i+1].split(" *, *"); trainOptions.deleteSplitters = new HashSet<String>(Arrays.asList(toDel)); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-postSplitWithBaseCategory")) { trainOptions.postSplitWithBaseCategory = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-vPostMarkov") && (i + 1 < args.length)) { int order = Integer.parseInt(args[i + 1]); if (order <= 1) { trainOptions.postPA = false; // depends on control dependency: [if], data = [none] trainOptions.postGPA = false; // depends on control dependency: [if], data = [none] } else if (order == 2) { trainOptions.postPA = 
true; // depends on control dependency: [if], data = [none] trainOptions.postGPA = false; // depends on control dependency: [if], data = [none] } else if (order >= 3) { trainOptions.postPA = true; // depends on control dependency: [if], data = [none] trainOptions.postGPA = true; // depends on control dependency: [if], data = [none] } i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-hMarkov") && (i + 1 < args.length)) { int order = Integer.parseInt(args[i + 1]); if (order >= 0) { trainOptions.markovOrder = order; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] } else { trainOptions.markovFactor = false; // depends on control dependency: [if], data = [none] } i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-distanceBins") && (i + 1 < args.length)) { int numBins = Integer.parseInt(args[i + 1]); if (numBins <= 1) { distance = false; // depends on control dependency: [if], data = [none] } else if (numBins == 4) { distance = true; // depends on control dependency: [if], data = [none] coarseDistance = true; // depends on control dependency: [if], data = [none] } else if (numBins == 5) { distance = true; // depends on control dependency: [if], data = [none] coarseDistance = false; // depends on control dependency: [if], data = [none] } else { throw new IllegalArgumentException("Invalid value for -distanceBin: " + args[i+1]); } i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-noStop")) { genStop = false; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-nonDirectional")) { directional = false; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if 
(args[i].equalsIgnoreCase("-depWeight") && (i + 1 < args.length)) { testOptions.depWeight = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printPCFGkBest") && (i + 1 < args.length)) { testOptions.printPCFGkBest = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printFactoredKGood") && (i + 1 < args.length)) { testOptions.printFactoredKGood = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-smoothTagsThresh") && (i + 1 < args.length)) { lexOptions.smoothInUnknownsThreshold = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-unseenSmooth") && (i + 1 < args.length)) { testOptions.unseenSmooth = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-fractionBeforeUnseenCounting") && (i + 1 < args.length)) { trainOptions.fractionBeforeUnseenCounting = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-hSelSplitThresh") && (i + 1 < args.length)) { trainOptions.HSEL_CUT = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = trainOptions.HSEL_CUT > 0; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-tagPA")) { 
trainOptions.tagPA = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-tagSelSplitCutOff") && (i + 1 < args.length)) { trainOptions.tagSelectiveSplitCutOff = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = trainOptions.tagSelectiveSplitCutOff > 0.0; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-tagSelPostSplitCutOff") && (i + 1 < args.length)) { trainOptions.tagSelectivePostSplitCutOff = Double.parseDouble(args[i + 1]); // depends on control dependency: [if], data = [none] trainOptions.tagSelectivePostSplit = trainOptions.tagSelectivePostSplitCutOff > 0.0; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-noTagSplit")) { trainOptions.noTagSplit = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-uwm") && (i + 1 < args.length)) { lexOptions.useUnknownWordSignatures = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-unknownSuffixSize") && (i + 1 < args.length)) { lexOptions.unknownSuffixSize = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-unknownPrefixSize") && (i + 1 < args.length)) { lexOptions.unknownPrefixSize = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if 
(args[i].equalsIgnoreCase("-uwModelTrainer") && (i + 1 < args.length)) { lexOptions.uwModelTrainer = args[i+1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-openClassThreshold") && (i + 1 < args.length)) { trainOptions.openClassTypesThreshold = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-unary") && i+1 < args.length) { trainOptions.markUnary = Integer.parseInt(args[i+1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-unaryTags")) { trainOptions.markUnaryTags = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-mutate")) { lexOptions.smartMutation = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-useUnicodeType")) { lexOptions.useUnicodeType = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-rightRec")) { trainOptions.rightRec = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-noRightRec")) { trainOptions.rightRec = false; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-preTag")) { testOptions.preTag = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-forceTags")) { 
testOptions.forceTags = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-taggerSerializedFile")) { testOptions.taggerSerializedFile = args[i+1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-forceTagBeginnings")) { testOptions.forceTagBeginnings = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-noFunctionalForcing")) { testOptions.noFunctionalForcing = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-scTags")) { dcTags = false; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-dcTags")) { dcTags = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-basicCategoryTagsInDependencyGrammar")) { trainOptions.basicCategoryTagsInDependencyGrammar = true; // depends on control dependency: [if], data = [none] i+= 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-evalb")) { testOptions.evalb = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-v") || args[i].equalsIgnoreCase("-verbose")) { testOptions.verbose = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-outputFilesDirectory") && i+1 < args.length) { testOptions.outputFilesDirectory = args[i+1]; // depends on control 
dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-outputFilesExtension") && i+1 < args.length) { testOptions.outputFilesExtension = args[i+1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-outputFilesPrefix") && i+1 < args.length) { testOptions.outputFilesPrefix = args[i+1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-outputkBestEquivocation") && i+1 < args.length) { testOptions.outputkBestEquivocation = args[i+1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-writeOutputFiles")) { testOptions.writeOutputFiles = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printAllBestParses")) { testOptions.printAllBestParses = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-outputTreeFormat") || args[i].equalsIgnoreCase("-outputFormat")) { testOptions.outputFormat = args[i + 1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-outputTreeFormatOptions") || args[i].equalsIgnoreCase("-outputFormatOptions")) { testOptions.outputFormatOptions = args[i + 1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-addMissingFinalPunctuation")) { testOptions.addMissingFinalPunctuation = true; // depends on control dependency: [if], data = [none] i += 1; // depends 
on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-flexiTag")) { lexOptions.flexiTag = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-lexiTag")) { lexOptions.flexiTag = false; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-useSignatureForKnownSmoothing")) { lexOptions.useSignatureForKnownSmoothing = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-compactGrammar")) { trainOptions.compactGrammar = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-markFinalStates")) { trainOptions.markFinalStates = args[i + 1].equalsIgnoreCase("true"); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-leftToRight")) { trainOptions.leftToRight = args[i + 1].equals("true"); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-cnf")) { forceCNF = true; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if(args[i].equalsIgnoreCase("-smoothRules")) { trainOptions.ruleSmoothing = true; // depends on control dependency: [if], data = [none] trainOptions.ruleSmoothingAlpha = Double.valueOf(args[i+1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-nodePrune") && i+1 < args.length) { nodePrune = args[i+1].equalsIgnoreCase("true"); 
// depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-noDoRecovery")) { testOptions.doRecovery = false; // depends on control dependency: [if], data = [none] i += 1; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-acl03chinese")) { trainOptions.markovOrder = 1; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] // no increment } else if (args[i].equalsIgnoreCase("-wordFunction")) { wordFunction = ReflectionLoading.loadByReflection(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-acl03pcfg")) { doDep = false; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] // lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagPA = true; // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = false; // depends on control dependency: [if], data = [none] trainOptions.rightRec = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 400.0; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // depends on control dependency: [if], data = [none] 
lexOptions.useUnknownWordSignatures = 2; // depends on control dependency: [if], data = [none] lexOptions.flexiTag = true; // depends on control dependency: [if], data = [none] // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // depends on control dependency: [if], data = [none] // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-jenny")) { doDep = false; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] // lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; // depends on control dependency: [if], data = [none] trainOptions.PA = false; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = false; // depends on control dependency: [if], data = [none] trainOptions.rightRec = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = false; // depends on control dependency: [if], data = [none] // trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = false; // depends on control dependency: [if], data = [none] // trainOptions.markovOrder = 2; trainOptions.hSelSplit = false; // depends on control dependency: [if], data = [none] lexOptions.useUnknownWordSignatures = 2; // depends on control dependency: [if], data = [none] lexOptions.flexiTag = true; // depends on control dependency: [if], data = [none] // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // depends on control dependency: [if], data = [none] // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-goodPCFG")) { doDep = false; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = 
[none] // op.lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagPA = true; // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = false; // depends on control dependency: [if], data = [none] trainOptions.rightRec = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 400.0; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // depends on control dependency: [if], data = [none] lexOptions.useUnknownWordSignatures = 2; // depends on control dependency: [if], data = [none] lexOptions.flexiTag = true; // depends on control dependency: [if], data = [none] // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // depends on control dependency: [if], data = [none] String[] delSplit = new String[] { "-deleteSplitters", "VP^NP,VP^VP,VP^SINV,VP^SQ" }; if (this.setOptionFlag(delSplit, 0) != 2) { System.err.println("Error processing deleteSplitters"); // depends on control dependency: [if], data = [none] } // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-linguisticPCFG")) { doDep = false; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] // op.lexOptions.smoothInUnknownsThreshold = 30; trainOptions.markUnary = 1; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], 
data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagPA = true; // on at the moment, but iffy // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = false; // depends on control dependency: [if], data = [none] trainOptions.rightRec = false; // not for linguistic // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 400.0; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // depends on control dependency: [if], data = [none] lexOptions.useUnknownWordSignatures = 5; // different from acl03pcfg // depends on control dependency: [if], data = [none] lexOptions.flexiTag = false; // different from acl03pcfg // depends on control dependency: [if], data = [none] // DAN: Tag double-counting is BAD for PCFG-only parsing dcTags = false; // depends on control dependency: [if], data = [none] // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-ijcai03")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] trainOptions.markUnary = 0; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = false; // depends on control dependency: [if], data = [none] trainOptions.rightRec = false; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = true; 
// depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 300.0; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // depends on control dependency: [if], data = [none] trainOptions.compactGrammar = 0; /// cdm: May 2005 compacting bad for factored? // depends on control dependency: [if], data = [none] lexOptions.useUnknownWordSignatures = 2; // depends on control dependency: [if], data = [none] lexOptions.flexiTag = false; // depends on control dependency: [if], data = [none] dcTags = true; // depends on control dependency: [if], data = [none] // op.nodePrune = true; // cdm: May 2005: this doesn't help // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-goodFactored")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] trainOptions.markUnary = 0; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagPA = false; // depends on control dependency: [if], data = [none] trainOptions.tagSelectiveSplit = false; // depends on control dependency: [if], data = [none] trainOptions.rightRec = false; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 300.0; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // 
depends on control dependency: [if], data = [none] trainOptions.compactGrammar = 0; /// cdm: May 2005 compacting bad for factored? // depends on control dependency: [if], data = [none] lexOptions.useUnknownWordSignatures = 5; // different from ijcai03 // depends on control dependency: [if], data = [none] lexOptions.flexiTag = false; // depends on control dependency: [if], data = [none] dcTags = true; // depends on control dependency: [if], data = [none] // op.nodePrune = true; // cdm: May 2005: this doesn't help // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-chineseFactored")) { // Single counting tag->word rewrite is also much better for Chinese // Factored. Bracketing F1 goes up about 0.7%. dcTags = false; // depends on control dependency: [if], data = [none] lexOptions.useUnicodeType = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.HSEL_CUT = 50; // depends on control dependency: [if], data = [none] // trainOptions.openClassTypesThreshold=1; // so can get unseen punctuation // trainOptions.fractionBeforeUnseenCounting=0.0; // so can get unseen punctuation // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-arabicFactored")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] dcTags = false; // "false" seems to help Arabic about 0.1% F1 // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // 
depends on control dependency: [if], data = [none] trainOptions.HSEL_CUT = 75; // 75 bit better than 50, 100 a bit worse // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 300.0; // depends on control dependency: [if], data = [none] trainOptions.markUnary = 1; // Helps PCFG and marginally factLB // depends on control dependency: [if], data = [none] // trainOptions.compactGrammar = 0; // Doesn't seem to help or only 0.05% F1 lexOptions.useUnknownWordSignatures = 9; // depends on control dependency: [if], data = [none] lexOptions.unknownPrefixSize = 1; // depends on control dependency: [if], data = [none] lexOptions.unknownSuffixSize = 1; // depends on control dependency: [if], data = [none] testOptions.MAX_ITEMS = 500000; // Arabic sentences are long enough that this helps a fraction // depends on control dependency: [if], data = [none] // don't increment i so it gets language specific stuff as well } else if (args[i].equalsIgnoreCase("-frenchFactored")) { doDep = true; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] dcTags = false; //wsg2011: Setting to false improves F1 by 0.5% // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.hSelSplit = true; // depends on control dependency: [if], data = [none] trainOptions.HSEL_CUT = 75; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = false; // depends on control dependency: [if], data 
= [none] trainOptions.selectiveSplit = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplitCutOff = 300.0; // depends on control dependency: [if], data = [none] trainOptions.markUnary = 0; //Unary rule marking bad for french..setting to 0 gives +0.3 F1 // depends on control dependency: [if], data = [none] lexOptions.useUnknownWordSignatures = 1; // depends on control dependency: [if], data = [none] lexOptions.unknownPrefixSize = 1; // depends on control dependency: [if], data = [none] lexOptions.unknownSuffixSize = 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-chinesePCFG")) { trainOptions.markovOrder = 2; // depends on control dependency: [if], data = [none] trainOptions.markovFactor = true; // depends on control dependency: [if], data = [none] trainOptions.HSEL_CUT = 5; // depends on control dependency: [if], data = [none] trainOptions.PA = true; // depends on control dependency: [if], data = [none] trainOptions.gPA = true; // depends on control dependency: [if], data = [none] trainOptions.selectiveSplit = false; // depends on control dependency: [if], data = [none] doDep = false; // depends on control dependency: [if], data = [none] doPCFG = true; // depends on control dependency: [if], data = [none] // Single counting tag->word rewrite is also much better for Chinese PCFG // Bracketing F1 is up about 2% and tag accuracy about 1% (exact by 6%) dcTags = false; // depends on control dependency: [if], data = [none] // no increment } else if (args[i].equalsIgnoreCase("-printTT") && (i+1 < args.length)) { trainOptions.printTreeTransformations = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printAnnotatedRuleCounts")) { trainOptions.printAnnotatedRuleCounts = true; // depends on control dependency: [if], data = [none] i++; // depends on control 
dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printAnnotatedStateCounts")) { trainOptions.printAnnotatedStateCounts = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printAnnotated") && (i + 1 < args.length)) { try { trainOptions.printAnnotatedPW = tlpParams.pw(new FileOutputStream(args[i + 1])); // depends on control dependency: [try], data = [none] } catch (IOException ioe) { trainOptions.printAnnotatedPW = null; } // depends on control dependency: [catch], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printBinarized") && (i + 1 < args.length)) { try { trainOptions.printBinarizedPW = tlpParams.pw(new FileOutputStream(args[i + 1])); // depends on control dependency: [try], data = [none] } catch (IOException ioe) { trainOptions.printBinarizedPW = null; } // depends on control dependency: [catch], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-printStates")) { trainOptions.printStates = true; // depends on control dependency: [if], data = [none] i++; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-preTransformer") && (i + 1 < args.length)) { String[] classes = args[i + 1].split(","); i += 2; // depends on control dependency: [if], data = [none] if (classes.length == 1) { trainOptions.preTransformer = ReflectionLoading.loadByReflection(classes[0], this); // depends on control dependency: [if], data = [none] } else if (classes.length > 1) { CompositeTreeTransformer composite = new CompositeTreeTransformer(); trainOptions.preTransformer = composite; // depends on control dependency: [if], data = [none] for (String clazz : classes) { TreeTransformer transformer = ReflectionLoading.loadByReflection(clazz, this); composite.addTransformer(transformer); // depends on 
control dependency: [for], data = [none] } } } else if (args[i].equalsIgnoreCase("-taggedFiles") && (i + 1 < args.length)) { trainOptions.taggedFiles = args[i + 1]; // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-evals")) { testOptions.evals = StringUtils.stringToProperties(args[i+1], testOptions.evals); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-fastFactoredCandidateMultiplier")) { testOptions.fastFactoredCandidateMultiplier = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } else if (args[i].equalsIgnoreCase("-fastFactoredCandidateAddend")) { testOptions.fastFactoredCandidateAddend = Integer.parseInt(args[i + 1]); // depends on control dependency: [if], data = [none] i += 2; // depends on control dependency: [if], data = [none] } return i; } }
public class class_name { public static CliOutput executeCommandLine(final Commandline cli, final String loggerName, final String logMessagePrefix, final InputStream inputStream, final int timeoutInSeconds) { try { String cliString = CommandLineUtils.toString(cli.getShellCommandline()); LOGGER.info("Executing command-line: {}", cliString); LoggingStreamConsumer stdOut = new LoggingStreamConsumer(loggerName, logMessagePrefix, false); LoggingStreamConsumer stdErr = new LoggingStreamConsumer(loggerName, logMessagePrefix, true); int exitCode = CommandLineUtils.executeCommandLine(cli, inputStream, stdOut, stdErr, timeoutInSeconds); return new CliOutput(stdOut.getOutput(), stdErr.getOutput(), exitCode); } catch (CommandLineException ex) { throw new CliException("Error executing command-line process.", ex); } } }
public class class_name { public static CliOutput executeCommandLine(final Commandline cli, final String loggerName, final String logMessagePrefix, final InputStream inputStream, final int timeoutInSeconds) { try { String cliString = CommandLineUtils.toString(cli.getShellCommandline()); LOGGER.info("Executing command-line: {}", cliString); // depends on control dependency: [try], data = [none] LoggingStreamConsumer stdOut = new LoggingStreamConsumer(loggerName, logMessagePrefix, false); LoggingStreamConsumer stdErr = new LoggingStreamConsumer(loggerName, logMessagePrefix, true); int exitCode = CommandLineUtils.executeCommandLine(cli, inputStream, stdOut, stdErr, timeoutInSeconds); return new CliOutput(stdOut.getOutput(), stdErr.getOutput(), exitCode); // depends on control dependency: [try], data = [none] } catch (CommandLineException ex) { throw new CliException("Error executing command-line process.", ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Counts how many matches the matcher's pattern has in its input.
     *
     * <p>The matcher is {@link Matcher#reset() reset} first, so counting always
     * starts from the beginning of the input regardless of prior use. Note this
     * mutates the supplied matcher's position.
     *
     * @param matcher matcher to scan; must not be null
     * @return the number of (non-overlapping) matches found
     */
    public static int getCount(Matcher matcher) {
        matcher.reset();
        int total = 0;
        for (boolean found = matcher.find(); found; found = matcher.find()) {
            total++;
        }
        return total;
    }
}
public class class_name {
    /**
     * Returns the total number of matches produced by the given matcher,
     * scanning the whole input from the start.
     *
     * <p>Side effect: the matcher is reset before counting and is left
     * positioned after its last match.
     *
     * @param matcher the matcher whose matches are counted
     * @return number of non-overlapping matches
     */
    public static int getCount(Matcher matcher) {
        matcher.reset();  // ignore any previous scanning position
        int occurrences = 0;
        for (;;) {
            if (!matcher.find()) {
                break;  // no further match in the remaining input
            }
            occurrences++;
        }
        return occurrences;
    }
}
public class class_name {
    /**
     * Handles auto-deploy problems observed during a scan: deployment content that
     * is still incomplete (e.g. mid-copy) and archives that could not be scanned.
     *
     * Returns {@code PROCEED} when nothing is outstanding, {@code RETRY} when problems
     * remain but no error was logged (a later scan may resolve them), or {@code ABORT}
     * once an error has been logged for any item.
     */
    private ScanStatus handleAutoDeployFailures(ScanContext scanContext) {
        ScanStatus result = ScanStatus.PROCEED;
        boolean warnLogged = false;

        // Files we previously tracked as incomplete that are no longer reported
        // incomplete by this scan; their leftover .pending markers are removed below.
        Set<File> noLongerIncomplete = new HashSet<File>(incompleteDeployments.keySet());
        noLongerIncomplete.removeAll(scanContext.incompleteFiles.keySet());

        // Drop tracking for anything the scan no longer flags, remembering the old
        // count so we can tell whether the user resolved some (but not all) items.
        int oldIncompleteCount = incompleteDeployments.size();
        incompleteDeployments.keySet().retainAll(scanContext.incompleteFiles.keySet());

        if (scanContext.incompleteFiles.size() > 0) {
            result = ScanStatus.RETRY;
            // If user dealt with some incomplete stuff but others remain, log everything again
            boolean logAll = incompleteDeployments.size() != oldIncompleteCount;
            long now = System.currentTimeMillis();
            for (Map.Entry<File, IncompleteDeploymentStatus> entry : scanContext.incompleteFiles.entrySet()) {
                File incompleteFile = entry.getKey();
                String deploymentName = incompleteFile.getName();
                // Prefer the previously recorded status, but take the fresh one if the
                // file has grown since (i.e. the copy is still making progress).
                IncompleteDeploymentStatus status = incompleteDeployments.get(incompleteFile);
                if (status == null || status.size < entry.getValue().size) {
                    status = entry.getValue();
                }
                // maxNoProgress is presumably a millisecond threshold — TODO confirm units.
                if (now - status.timestamp > maxNoProgress) {
                    if (!status.warned) {
                        // Treat no progress for an extended period as a failed deployment
                        String suffix = deployed.containsKey(deploymentName) ? DeploymentScannerLogger.ROOT_LOGGER.previousContentDeployed() : "";
                        String msg = DeploymentScannerLogger.ROOT_LOGGER.deploymentContentIncomplete(incompleteFile, suffix);
                        writeFailedMarker(incompleteFile, msg, status.timestamp);
                        ROOT_LOGGER.error(msg);
                        status.warned = true;   // warn only once per stalled deployment
                        warnLogged = true;
                        result = ScanStatus.ABORT;
                    }
                    // Clean up any .pending file
                    new File(incompleteFile.getParentFile(), deploymentName + PENDING).delete();
                } else {
                    // Still within the grace period: (re-)track it. put(...) == null
                    // means this is the first time we see this file as incomplete.
                    boolean newIncomplete = incompleteDeployments.put(incompleteFile, status) == null;
                    if (newIncomplete || logAll) {
                        ROOT_LOGGER.incompleteContent(entry.getKey().getPath());
                    }
                    if (newIncomplete) {
                        // Drop a .pending marker next to the newly incomplete file.
                        File pending = new File(incompleteFile.getParentFile(), deploymentName + PENDING);
                        createMarkerFile(pending, deploymentName);
                    }
                }
            }
        }

        // Clean out any old "pending" files
        for (File complete : noLongerIncomplete) {
            File pending = new File(complete.getParentFile(), complete.getName() + PENDING);
            removeExtraneousMarker(pending, pending.getName());
        }

        // Same resolve-tracking trick as above, for non-scannable archives.
        int oldNonScannableCount = nonscannableLogged.size();
        nonscannableLogged.retainAll(scanContext.nonscannable.keySet());
        if (scanContext.nonscannable.size() > 0) {
            // Keep an ABORT from the incomplete-files pass; otherwise escalate to RETRY.
            result = (result == ScanStatus.PROCEED ? ScanStatus.RETRY : result);
            // If user dealt with some nonscannable stuff but others remain, log everything again
            boolean logAll = nonscannableLogged.size() != oldNonScannableCount;
            for (Map.Entry<File, NonScannableStatus> entry : scanContext.nonscannable.entrySet()) {
                File nonScannable = entry.getKey();
                String fileName = nonScannable.getName();
                // add(...) is true only the first time we log this file; logAll forces a repeat.
                if (nonscannableLogged.add(nonScannable) || logAll) {
                    NonScannableStatus nonScannableStatus = entry.getValue();
                    NonScannableZipException e = nonScannableStatus.exception;
                    String msg = DeploymentScannerLogger.ROOT_LOGGER.unsafeAutoDeploy2(e.getLocalizedMessage(), fileName, DO_DEPLOY);
                    writeFailedMarker(nonScannable, msg, nonScannableStatus.timestamp);
                    ROOT_LOGGER.error(msg);
                    warnLogged = true;
                    result = ScanStatus.ABORT;
                }
            }
        }

        if (warnLogged) {
            // Summarize every problematic file name (both categories) in one warning.
            Set<String> allProblems = new HashSet<String>();
            for (File f : scanContext.nonscannable.keySet()) {
                allProblems.add(f.getName());
            }
            for (File f : scanContext.incompleteFiles.keySet()) {
                allProblems.add(f.getName());
            }
            ROOT_LOGGER.unsafeAutoDeploy(DO_DEPLOY, SKIP_DEPLOY, allProblems);
        }
        return result;
    }
}
public class class_name { private ScanStatus handleAutoDeployFailures(ScanContext scanContext) { ScanStatus result = ScanStatus.PROCEED; boolean warnLogged = false; Set<File> noLongerIncomplete = new HashSet<File>(incompleteDeployments.keySet()); noLongerIncomplete.removeAll(scanContext.incompleteFiles.keySet()); int oldIncompleteCount = incompleteDeployments.size(); incompleteDeployments.keySet().retainAll(scanContext.incompleteFiles.keySet()); if (scanContext.incompleteFiles.size() > 0) { result = ScanStatus.RETRY; // depends on control dependency: [if], data = [none] // If user dealt with some incomplete stuff but others remain, log everything again boolean logAll = incompleteDeployments.size() != oldIncompleteCount; long now = System.currentTimeMillis(); for (Map.Entry<File, IncompleteDeploymentStatus> entry : scanContext.incompleteFiles.entrySet()) { File incompleteFile = entry.getKey(); String deploymentName = incompleteFile.getName(); IncompleteDeploymentStatus status = incompleteDeployments.get(incompleteFile); if (status == null || status.size < entry.getValue().size) { status = entry.getValue(); // depends on control dependency: [if], data = [none] } if (now - status.timestamp > maxNoProgress) { if (!status.warned) { // Treat no progress for an extended period as a failed deployment String suffix = deployed.containsKey(deploymentName) ? 
DeploymentScannerLogger.ROOT_LOGGER.previousContentDeployed() : ""; String msg = DeploymentScannerLogger.ROOT_LOGGER.deploymentContentIncomplete(incompleteFile, suffix); writeFailedMarker(incompleteFile, msg, status.timestamp); // depends on control dependency: [if], data = [none] ROOT_LOGGER.error(msg); // depends on control dependency: [if], data = [none] status.warned = true; // depends on control dependency: [if], data = [none] warnLogged = true; // depends on control dependency: [if], data = [none] result = ScanStatus.ABORT; // depends on control dependency: [if], data = [none] } // Clean up any .pending file new File(incompleteFile.getParentFile(), deploymentName + PENDING).delete(); // depends on control dependency: [if], data = [none] } else { boolean newIncomplete = incompleteDeployments.put(incompleteFile, status) == null; if (newIncomplete || logAll) { ROOT_LOGGER.incompleteContent(entry.getKey().getPath()); // depends on control dependency: [if], data = [none] } if (newIncomplete) { File pending = new File(incompleteFile.getParentFile(), deploymentName + PENDING); createMarkerFile(pending, deploymentName); // depends on control dependency: [if], data = [none] } } } } // Clean out any old "pending" files for (File complete : noLongerIncomplete) { File pending = new File(complete.getParentFile(), complete.getName() + PENDING); removeExtraneousMarker(pending, pending.getName()); // depends on control dependency: [for], data = [none] } int oldNonScannableCount = nonscannableLogged.size(); nonscannableLogged.retainAll(scanContext.nonscannable.keySet()); if (scanContext.nonscannable.size() > 0) { result = (result == ScanStatus.PROCEED ? 
ScanStatus.RETRY : result); // depends on control dependency: [if], data = [none] // If user dealt with some nonscannable stuff but others remain, log everything again boolean logAll = nonscannableLogged.size() != oldNonScannableCount; for (Map.Entry<File, NonScannableStatus> entry : scanContext.nonscannable.entrySet()) { File nonScannable = entry.getKey(); String fileName = nonScannable.getName(); if (nonscannableLogged.add(nonScannable) || logAll) { NonScannableStatus nonScannableStatus = entry.getValue(); NonScannableZipException e = nonScannableStatus.exception; String msg = DeploymentScannerLogger.ROOT_LOGGER.unsafeAutoDeploy2(e.getLocalizedMessage(), fileName, DO_DEPLOY); writeFailedMarker(nonScannable, msg, nonScannableStatus.timestamp); // depends on control dependency: [if], data = [none] ROOT_LOGGER.error(msg); // depends on control dependency: [if], data = [none] warnLogged = true; // depends on control dependency: [if], data = [none] result = ScanStatus.ABORT; // depends on control dependency: [if], data = [none] } } } if (warnLogged) { Set<String> allProblems = new HashSet<String>(); for (File f : scanContext.nonscannable.keySet()) { allProblems.add(f.getName()); // depends on control dependency: [for], data = [f] } for (File f : scanContext.incompleteFiles.keySet()) { allProblems.add(f.getName()); // depends on control dependency: [for], data = [f] } ROOT_LOGGER.unsafeAutoDeploy(DO_DEPLOY, SKIP_DEPLOY, allProblems); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public final void info(String message, Throwable throwable) { if (isInfoEnabled()) { out.print( "[ maven embedder INFO] " ); out.println( message ); if (null != throwable) { throwable.printStackTrace( out ); } } } }
public class class_name { public final void info(String message, Throwable throwable) { if (isInfoEnabled()) { out.print( "[ maven embedder INFO] " ); // depends on control dependency: [if], data = [none] out.println( message ); // depends on control dependency: [if], data = [none] if (null != throwable) { throwable.printStackTrace( out ); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static byte[] deflater(final byte[] inputByte) throws IOException { int compressedDataLength = 0; Deflater compresser = new Deflater(); compresser.setInput(inputByte); compresser.finish(); ByteArrayOutputStream o = new ByteArrayOutputStream(inputByte.length); byte[] result = new byte[1024]; try { while (!compresser.finished()) { compressedDataLength = compresser.deflate(result); o.write(result, 0, compressedDataLength); } } finally { o.close(); } compresser.end(); return o.toByteArray(); } }
public class class_name { public static byte[] deflater(final byte[] inputByte) throws IOException { int compressedDataLength = 0; Deflater compresser = new Deflater(); compresser.setInput(inputByte); compresser.finish(); ByteArrayOutputStream o = new ByteArrayOutputStream(inputByte.length); byte[] result = new byte[1024]; try { while (!compresser.finished()) { compressedDataLength = compresser.deflate(result); // depends on control dependency: [while], data = [none] o.write(result, 0, compressedDataLength); // depends on control dependency: [while], data = [none] } } finally { o.close(); } compresser.end(); return o.toByteArray(); } }
public class class_name { protected void update() { //Making more granual locking so that clusterStatus can be fetched from Jobtracker. ClusterStatus clusterStatus = taskTrackerManager.getClusterStatus(); // Recompute locality delay from JobTracker heartbeat interval if enabled. // This will also lock the JT, so do it outside of a fair scheduler lock. if (autoComputeLocalityDelay) { JobTracker jobTracker = (JobTracker) taskTrackerManager; localityDelayNodeLocal = Math.min(MAX_AUTOCOMPUTED_LOCALITY_DELAY, (long) (1.5 * jobTracker.getNextHeartbeatInterval())); localityDelayRackLocal = localityDelayNodeLocal; } // Got clusterStatus hence acquiring scheduler lock now // Remove non-running jobs synchronized(this){ // Reload allocations file if it hasn't been loaded in a while if (poolMgr.reloadAllocsIfNecessary()) { // Check if the cluster have enough slots for reserving poolMgr.checkMinimumSlotsAvailable(clusterStatus, TaskType.MAP); poolMgr.checkMinimumSlotsAvailable(clusterStatus, TaskType.REDUCE); } List<JobInProgress> toRemove = new ArrayList<JobInProgress>(); for (JobInProgress job: infos.keySet()) { int runState = job.getStatus().getRunState(); if (runState == JobStatus.SUCCEEDED || runState == JobStatus.FAILED || runState == JobStatus.KILLED) { toRemove.add(job); } } for (JobInProgress job: toRemove) { infos.remove(job); poolMgr.removeJob(job); } // Update running jobs with deficits since last update, and compute new // slot allocations, weight, shares and task counts long now = clock.getTime(); long timeDelta = now - lastUpdateTime; updateDeficits(timeDelta); updateRunnability(); updateTaskCounts(); updateWeights(); updateMinAndMaxSlots(); updateFairShares(clusterStatus); if (preemptionEnabled) { updatePreemptionVariables(); } sortJobs(); updatePoolMetrics(); dumpStatus(now); lastUpdateTime = now; } } }
public class class_name { protected void update() { //Making more granual locking so that clusterStatus can be fetched from Jobtracker. ClusterStatus clusterStatus = taskTrackerManager.getClusterStatus(); // Recompute locality delay from JobTracker heartbeat interval if enabled. // This will also lock the JT, so do it outside of a fair scheduler lock. if (autoComputeLocalityDelay) { JobTracker jobTracker = (JobTracker) taskTrackerManager; localityDelayNodeLocal = Math.min(MAX_AUTOCOMPUTED_LOCALITY_DELAY, (long) (1.5 * jobTracker.getNextHeartbeatInterval())); // depends on control dependency: [if], data = [none] localityDelayRackLocal = localityDelayNodeLocal; // depends on control dependency: [if], data = [none] } // Got clusterStatus hence acquiring scheduler lock now // Remove non-running jobs synchronized(this){ // Reload allocations file if it hasn't been loaded in a while if (poolMgr.reloadAllocsIfNecessary()) { // Check if the cluster have enough slots for reserving poolMgr.checkMinimumSlotsAvailable(clusterStatus, TaskType.MAP); // depends on control dependency: [if], data = [none] poolMgr.checkMinimumSlotsAvailable(clusterStatus, TaskType.REDUCE); // depends on control dependency: [if], data = [none] } List<JobInProgress> toRemove = new ArrayList<JobInProgress>(); for (JobInProgress job: infos.keySet()) { int runState = job.getStatus().getRunState(); if (runState == JobStatus.SUCCEEDED || runState == JobStatus.FAILED || runState == JobStatus.KILLED) { toRemove.add(job); // depends on control dependency: [if], data = [none] } } for (JobInProgress job: toRemove) { infos.remove(job); // depends on control dependency: [for], data = [job] poolMgr.removeJob(job); // depends on control dependency: [for], data = [job] } // Update running jobs with deficits since last update, and compute new // slot allocations, weight, shares and task counts long now = clock.getTime(); long timeDelta = now - lastUpdateTime; updateDeficits(timeDelta); updateRunnability(); 
updateTaskCounts(); updateWeights(); updateMinAndMaxSlots(); updateFairShares(clusterStatus); if (preemptionEnabled) { updatePreemptionVariables(); // depends on control dependency: [if], data = [none] } sortJobs(); updatePoolMetrics(); dumpStatus(now); lastUpdateTime = now; } } }
public class class_name { public void close() { if (closed) { log.debug("Already closed, nothing to do"); return; } closed = true; if (scope != null) { log.debug("Close, disconnect from scope, and children"); try { // unregister all child scopes first for (IBasicScope basicScope : basicScopes) { unregisterBasicScope(basicScope); } } catch (Exception err) { log.error("Error while unregistering basic scopes", err); } // disconnect if (scope != null) { try { scope.disconnect(this); } catch (Exception err) { log.error("Error while disconnecting from scope: {}. {}", scope, err); } scope = null; } } // unregister client if (client != null && client instanceof Client) { ((Client) client).unregister(this); } // alert our listeners if (connectionListeners != null) { for (IConnectionListener listener : connectionListeners) { listener.notifyDisconnected(this); } connectionListeners.clear(); connectionListeners = null; } } }
public class class_name { public void close() { if (closed) { log.debug("Already closed, nothing to do"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } closed = true; if (scope != null) { log.debug("Close, disconnect from scope, and children"); // depends on control dependency: [if], data = [none] try { // unregister all child scopes first for (IBasicScope basicScope : basicScopes) { unregisterBasicScope(basicScope); // depends on control dependency: [for], data = [basicScope] } } catch (Exception err) { log.error("Error while unregistering basic scopes", err); } // depends on control dependency: [catch], data = [none] // disconnect if (scope != null) { try { scope.disconnect(this); // depends on control dependency: [try], data = [none] } catch (Exception err) { log.error("Error while disconnecting from scope: {}. {}", scope, err); } // depends on control dependency: [catch], data = [none] scope = null; // depends on control dependency: [if], data = [none] } } // unregister client if (client != null && client instanceof Client) { ((Client) client).unregister(this); // depends on control dependency: [if], data = [none] } // alert our listeners if (connectionListeners != null) { for (IConnectionListener listener : connectionListeners) { listener.notifyDisconnected(this); // depends on control dependency: [for], data = [listener] } connectionListeners.clear(); // depends on control dependency: [if], data = [none] connectionListeners = null; // depends on control dependency: [if], data = [none] } } }
public class class_name { public IWizardElement getPrevious() { IWizardElement ret = null; for (final IWizardElement uiObject : this.elements) { if (uiObject == this.current) { break; } else { ret = uiObject; } } this.current = ret; return ret; } }
public class class_name { public IWizardElement getPrevious() { IWizardElement ret = null; for (final IWizardElement uiObject : this.elements) { if (uiObject == this.current) { break; } else { ret = uiObject; // depends on control dependency: [if], data = [none] } } this.current = ret; return ret; } }
public class class_name { private static String getFileExtension(Resource file) { String name = file.getName(); String[] arr; try { arr = ListUtil.toStringArray(ListUtil.listToArrayRemoveEmpty(name, '.')); } catch (PageException e) { arr = null; } if (arr.length < 2) return ""; return arr[arr.length - 1]; } }
public class class_name { private static String getFileExtension(Resource file) { String name = file.getName(); String[] arr; try { arr = ListUtil.toStringArray(ListUtil.listToArrayRemoveEmpty(name, '.')); // depends on control dependency: [try], data = [none] } catch (PageException e) { arr = null; } // depends on control dependency: [catch], data = [none] if (arr.length < 2) return ""; return arr[arr.length - 1]; } }
public class class_name { public static String getTagValue(Iterable<Tag> tags, String k) { Preconditions.checkNotNull(tags, "tags"); Preconditions.checkNotNull(k, "key"); for (Tag t : tags) { if (k.equals(t.key())) { return t.value(); } } return null; } }
public class class_name { public static String getTagValue(Iterable<Tag> tags, String k) { Preconditions.checkNotNull(tags, "tags"); Preconditions.checkNotNull(k, "key"); for (Tag t : tags) { if (k.equals(t.key())) { return t.value(); // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { public void writeOutput(DataPipe cr) { String[] nextLine = new String[cr.getDataMap().entrySet().size()]; int count = 0; for (Map.Entry<String, String> entry : cr.getDataMap().entrySet()) { nextLine[count] = entry.getValue(); count++; } csvFile.writeNext(nextLine); } }
public class class_name { public void writeOutput(DataPipe cr) { String[] nextLine = new String[cr.getDataMap().entrySet().size()]; int count = 0; for (Map.Entry<String, String> entry : cr.getDataMap().entrySet()) { nextLine[count] = entry.getValue(); // depends on control dependency: [for], data = [entry] count++; // depends on control dependency: [for], data = [none] } csvFile.writeNext(nextLine); } }
public class class_name { private String escapeSpecialCharacters(final String string) { final StringBuilder escaped = new StringBuilder(); for (final char character : string.toCharArray()) { if (character == '%' || character == '_' || character == '\\') { escaped.append('\\'); } escaped.append(character); } return escaped.toString(); } }
public class class_name { private String escapeSpecialCharacters(final String string) { final StringBuilder escaped = new StringBuilder(); for (final char character : string.toCharArray()) { if (character == '%' || character == '_' || character == '\\') { escaped.append('\\'); // depends on control dependency: [if], data = [none] } escaped.append(character); // depends on control dependency: [for], data = [character] } return escaped.toString(); } }
public class class_name { public static AbstractTopology getTopology(Map<Integer, HostInfo> hostInfos, Set<Integer> missingHosts, int kfactor, boolean restorePartition ) { TopologyBuilder builder = addPartitionsToHosts(hostInfos, missingHosts, kfactor, 0); AbstractTopology topo = new AbstractTopology(EMPTY_TOPOLOGY, builder); if (restorePartition && hostInfos.size() == topo.getHostCount()) { topo = mutateRestorePartitionsForRecovery(topo, hostInfos, missingHosts); } return topo; } }
public class class_name { public static AbstractTopology getTopology(Map<Integer, HostInfo> hostInfos, Set<Integer> missingHosts, int kfactor, boolean restorePartition ) { TopologyBuilder builder = addPartitionsToHosts(hostInfos, missingHosts, kfactor, 0); AbstractTopology topo = new AbstractTopology(EMPTY_TOPOLOGY, builder); if (restorePartition && hostInfos.size() == topo.getHostCount()) { topo = mutateRestorePartitionsForRecovery(topo, hostInfos, missingHosts); // depends on control dependency: [if], data = [none] } return topo; } }
public class class_name { @Override List<String> getRequestParts() { List<String> ret = super.getRequestParts(); ret.add(operation); if (arguments.size() > 0) { for (int i = 0; i < arguments.size(); i++) { ret.add(serializeArgumentToRequestPart(arguments.get(i))); } } return ret; } }
public class class_name { @Override List<String> getRequestParts() { List<String> ret = super.getRequestParts(); ret.add(operation); if (arguments.size() > 0) { for (int i = 0; i < arguments.size(); i++) { ret.add(serializeArgumentToRequestPart(arguments.get(i))); // depends on control dependency: [for], data = [i] } } return ret; } }
public class class_name { public static SymbolizerTypeInfo createSymbolizer(LayerType type, FeatureStyleInfo featureStyle) { SymbolInfo symbol = featureStyle.getSymbol(); SymbolizerTypeInfo symbolizer = null; StrokeInfo stroke = createStroke(featureStyle.getStrokeColor(), featureStyle.getStrokeWidth(), featureStyle.getStrokeOpacity(), featureStyle.getDashArray()); FillInfo fill = createFill(featureStyle.getFillColor(), featureStyle.getFillOpacity()); switch (type) { case GEOMETRY: break; case LINESTRING: case MULTILINESTRING: symbolizer = createLineSymbolizer(stroke); break; case MULTIPOINT: case POINT: GraphicInfo graphic; if (symbol.getCircle() != null) { MarkInfo circle = createMark(WKN_CIRCLE, fill, stroke); graphic = createGraphic(circle, (int) (2 * symbol.getCircle().getR())); } else if (symbol.getRect() != null) { MarkInfo rect = createMark(WKN_RECT, fill, stroke); graphic = createGraphic(rect, (int) symbol.getRect().getH()); } else { ExternalGraphicInfo image = createExternalGraphic(symbol.getImage().getHref()); graphic = createGraphic(image, symbol.getImage().getHeight()); } symbolizer = createPointSymbolizer(graphic); break; case POLYGON: case MULTIPOLYGON: symbolizer = createPolygonSymbolizer(fill, stroke); break; default: throw new IllegalStateException("Unknown layer type " + type); } return symbolizer; } }
public class class_name { public static SymbolizerTypeInfo createSymbolizer(LayerType type, FeatureStyleInfo featureStyle) { SymbolInfo symbol = featureStyle.getSymbol(); SymbolizerTypeInfo symbolizer = null; StrokeInfo stroke = createStroke(featureStyle.getStrokeColor(), featureStyle.getStrokeWidth(), featureStyle.getStrokeOpacity(), featureStyle.getDashArray()); FillInfo fill = createFill(featureStyle.getFillColor(), featureStyle.getFillOpacity()); switch (type) { case GEOMETRY: break; case LINESTRING: case MULTILINESTRING: symbolizer = createLineSymbolizer(stroke); break; case MULTIPOINT: case POINT: GraphicInfo graphic; if (symbol.getCircle() != null) { MarkInfo circle = createMark(WKN_CIRCLE, fill, stroke); graphic = createGraphic(circle, (int) (2 * symbol.getCircle().getR())); // depends on control dependency: [if], data = [none] } else if (symbol.getRect() != null) { MarkInfo rect = createMark(WKN_RECT, fill, stroke); graphic = createGraphic(rect, (int) symbol.getRect().getH()); // depends on control dependency: [if], data = [none] } else { ExternalGraphicInfo image = createExternalGraphic(symbol.getImage().getHref()); graphic = createGraphic(image, symbol.getImage().getHeight()); // depends on control dependency: [if], data = [none] } symbolizer = createPointSymbolizer(graphic); break; case POLYGON: case MULTIPOLYGON: symbolizer = createPolygonSymbolizer(fill, stroke); break; default: throw new IllegalStateException("Unknown layer type " + type); } return symbolizer; } }
public class class_name { public static Iterable<MutableLongTuple> mooreNeighborhoodIterable( LongTuple center, final int radius, LongTuple min, LongTuple max, Order order) { Objects.requireNonNull(order, "The order is null"); if (min != null) { Utils.checkForEqualSize(center, min); } if (max != null) { Utils.checkForEqualSize(center, max); } final LongTuple localCenter = LongTuples.copy(center); final LongTuple localMin = min == null ? null : LongTuples.copy(min); final LongTuple localMax = max == null ? null : LongTuples.copy(max); return () -> LongTupleNeighborhoodIterators.mooreNeighborhoodIterator( localCenter, radius, localMin, localMax, order); } }
public class class_name { public static Iterable<MutableLongTuple> mooreNeighborhoodIterable( LongTuple center, final int radius, LongTuple min, LongTuple max, Order order) { Objects.requireNonNull(order, "The order is null"); if (min != null) { Utils.checkForEqualSize(center, min); // depends on control dependency: [if], data = [none] } if (max != null) { Utils.checkForEqualSize(center, max); // depends on control dependency: [if], data = [none] } final LongTuple localCenter = LongTuples.copy(center); final LongTuple localMin = min == null ? null : LongTuples.copy(min); final LongTuple localMax = max == null ? null : LongTuples.copy(max); return () -> LongTupleNeighborhoodIterators.mooreNeighborhoodIterator( localCenter, radius, localMin, localMax, order); } }
public class class_name { public void removeVertex(AtlasVertex vertex) { String vertexString = null; if (LOG.isDebugEnabled()) { vertexString = string(vertex); LOG.debug("Removing {}", vertexString); } graph.removeVertex(vertex); if (LOG.isDebugEnabled()) { LOG.info("Removed {}", vertexString); } } }
public class class_name { public void removeVertex(AtlasVertex vertex) { String vertexString = null; if (LOG.isDebugEnabled()) { vertexString = string(vertex); // depends on control dependency: [if], data = [none] LOG.debug("Removing {}", vertexString); // depends on control dependency: [if], data = [none] } graph.removeVertex(vertex); if (LOG.isDebugEnabled()) { LOG.info("Removed {}", vertexString); // depends on control dependency: [if], data = [none] } } }
public class class_name { public PoolAddOptions withOcpDate(DateTime ocpDate) { if (ocpDate == null) { this.ocpDate = null; } else { this.ocpDate = new DateTimeRfc1123(ocpDate); } return this; } }
public class class_name { public PoolAddOptions withOcpDate(DateTime ocpDate) { if (ocpDate == null) { this.ocpDate = null; // depends on control dependency: [if], data = [none] } else { this.ocpDate = new DateTimeRfc1123(ocpDate); // depends on control dependency: [if], data = [(ocpDate] } return this; } }
public class class_name { public static boolean allowParamsOnFormAction( ServletContext servletContext, ServletRequest request ) { ArrayList/*< URLRewriter >*/ rewriters = getRewriters( request ); if ( rewriters != null ) { for ( Iterator i = rewriters.iterator(); i.hasNext(); ) { URLRewriter rewriter = ( URLRewriter ) i.next(); if ( !rewriter.allowParamsOnFormAction( servletContext, request ) ) { return false; } } } return true; } }
public class class_name { public static boolean allowParamsOnFormAction( ServletContext servletContext, ServletRequest request ) { ArrayList/*< URLRewriter >*/ rewriters = getRewriters( request ); if ( rewriters != null ) { for ( Iterator i = rewriters.iterator(); i.hasNext(); ) { URLRewriter rewriter = ( URLRewriter ) i.next(); if ( !rewriter.allowParamsOnFormAction( servletContext, request ) ) { return false; } // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { public void marshall(GetDiscoverySummaryRequest getDiscoverySummaryRequest, ProtocolMarshaller protocolMarshaller) { if (getDiscoverySummaryRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetDiscoverySummaryRequest getDiscoverySummaryRequest, ProtocolMarshaller protocolMarshaller) { if (getDiscoverySummaryRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static boolean isSbeCSharpName(final String value) { if (possibleCSharpKeyword(value)) { if (isCSharpKeyword(value)) { return false; } } else { return false; } return true; } }
public class class_name { public static boolean isSbeCSharpName(final String value) { if (possibleCSharpKeyword(value)) { if (isCSharpKeyword(value)) { return false; // depends on control dependency: [if], data = [none] } } else { return false; // depends on control dependency: [if], data = [none] } return true; } }
public class class_name { private Map<WComponent, Stat> createWCTreeStats(final WComponent root) { Map<WComponent, Stat> statsMap = new HashMap<>(); UIContextHolder.pushContext(uic); try { addStats(statsMap, root); } finally { UIContextHolder.popContext(); } return statsMap; } }
public class class_name { private Map<WComponent, Stat> createWCTreeStats(final WComponent root) { Map<WComponent, Stat> statsMap = new HashMap<>(); UIContextHolder.pushContext(uic); try { addStats(statsMap, root); // depends on control dependency: [try], data = [none] } finally { UIContextHolder.popContext(); } return statsMap; } }
public class class_name { public static Class getFieldType(ClassNode node, String fieldName) { while (node != null) { for (FieldNode field: node.getFields()) { if (field.getName().equals(fieldName)) { return getFieldType(field); } } node = node.getOuterClass(); } return null; } }
public class class_name { public static Class getFieldType(ClassNode node, String fieldName) { while (node != null) { for (FieldNode field: node.getFields()) { if (field.getName().equals(fieldName)) { return getFieldType(field); // depends on control dependency: [if], data = [none] } } node = node.getOuterClass(); } return null; } }
public class class_name { @Override public final String getAsString(final FacesContext context, final UIComponent component, final Object value) { String strValue = null; String symbol = ""; try { strValue = (String) value; symbol = (String) component.getAttributes() .get(TieConstants.CELL_DATA_SYMBOL); if ((symbol != null) && (symbol .equals(TieConstants.CELL_FORMAT_PERCENTAGE_SYMBOL)) && (value != null) && !((String) value).isEmpty()) { Double doubleValue = Double.valueOf((String) value) * TieConstants.CELL_FORMAT_PERCENTAGE_VALUE; strValue = fmtNumber(doubleValue) + TieConstants.CELL_FORMAT_PERCENTAGE_SYMBOL; } } catch (Exception ex) { LOG.log(Level.SEVERE, "error in getAsString of TieSheetNumberConverter : " + ex.getLocalizedMessage(), ex); } return strValue; } }
public class class_name { @Override public final String getAsString(final FacesContext context, final UIComponent component, final Object value) { String strValue = null; String symbol = ""; try { strValue = (String) value; // depends on control dependency: [try], data = [none] symbol = (String) component.getAttributes() .get(TieConstants.CELL_DATA_SYMBOL); // depends on control dependency: [try], data = [none] if ((symbol != null) && (symbol .equals(TieConstants.CELL_FORMAT_PERCENTAGE_SYMBOL)) && (value != null) && !((String) value).isEmpty()) { Double doubleValue = Double.valueOf((String) value) * TieConstants.CELL_FORMAT_PERCENTAGE_VALUE; strValue = fmtNumber(doubleValue) + TieConstants.CELL_FORMAT_PERCENTAGE_SYMBOL; // depends on control dependency: [if], data = [none] } } catch (Exception ex) { LOG.log(Level.SEVERE, "error in getAsString of TieSheetNumberConverter : " + ex.getLocalizedMessage(), ex); } // depends on control dependency: [catch], data = [none] return strValue; } }
public class class_name { public void validateModel(DomDocument document) { Schema schema = getSchema(document); if (schema == null) { return; } Validator validator = schema.newValidator(); try { synchronized(document) { validator.validate(document.getDomSource()); } } catch (IOException e) { throw new ModelValidationException("Error during DOM document validation", e); } catch (SAXException e) { throw new ModelValidationException("DOM document is not valid", e); } } }
public class class_name { public void validateModel(DomDocument document) { Schema schema = getSchema(document); if (schema == null) { return; // depends on control dependency: [if], data = [none] } Validator validator = schema.newValidator(); try { synchronized(document) { // depends on control dependency: [try], data = [none] validator.validate(document.getDomSource()); } } catch (IOException e) { throw new ModelValidationException("Error during DOM document validation", e); } catch (SAXException e) { // depends on control dependency: [catch], data = [none] throw new ModelValidationException("DOM document is not valid", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public boolean isIncluded(String url) { if (url == null) { return false; } if (url.indexOf("?") > 0) { // Strip off any parameters url = url.substring(0, url.indexOf("?")); } for (Pattern p : this.includeInPatterns) { if (p.matcher(url).matches()) { return true; } } return false; } }
public class class_name { public boolean isIncluded(String url) { if (url == null) { return false; // depends on control dependency: [if], data = [none] } if (url.indexOf("?") > 0) { // Strip off any parameters url = url.substring(0, url.indexOf("?")); // depends on control dependency: [if], data = [none] } for (Pattern p : this.includeInPatterns) { if (p.matcher(url).matches()) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public <O> CursorList<O> transform(Function<? super J, ? extends O> function, Predicate<? super O> predicate) { Collection<O> result = null; if (function != null) { result = Lists.transform(this.data, function); } else { result = (List) this.data; } if (predicate != null) { result = Collections2.filter(result, predicate); } // copy the source data, since "live" collections are incompatible with paged results. ArrayList<O> dataCopy = Lists.newArrayList(result); CursorList<O> copy = new CursorList<O>(dataCopy, this.cursor); return copy; } }
public class class_name { public <O> CursorList<O> transform(Function<? super J, ? extends O> function, Predicate<? super O> predicate) { Collection<O> result = null; if (function != null) { result = Lists.transform(this.data, function); // depends on control dependency: [if], data = [none] } else { result = (List) this.data; // depends on control dependency: [if], data = [none] } if (predicate != null) { result = Collections2.filter(result, predicate); // depends on control dependency: [if], data = [none] } // copy the source data, since "live" collections are incompatible with paged results. ArrayList<O> dataCopy = Lists.newArrayList(result); CursorList<O> copy = new CursorList<O>(dataCopy, this.cursor); return copy; } }
public class class_name {
    /**
     * Looks up the XML configuration DOM of the plugin with the given id in the
     * project's build section.
     *
     * @param project  the Maven project whose build plugins are inspected
     * @param pluginId key into the project's plugins-as-map
     * @return the plugin's configuration as an Xpp3Dom, or null when the plugin
     *         is absent (or, presumably, when it has no configuration)
     */
    private static Xpp3Dom getPluginConfigurationDom(MavenProject project, String pluginId) {
        final Plugin plugin = project.getBuild().getPluginsAsMap().get(pluginId);
        return (plugin == null) ? null : (Xpp3Dom) plugin.getConfiguration();
    }
}
// Control-dependency-annotated copy of getPluginConfigurationDom. NOTE(review):
// generated analysis data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { private static Xpp3Dom getPluginConfigurationDom(MavenProject project, String pluginId) { Plugin plugin = project.getBuild().getPluginsAsMap().get(pluginId); if (plugin != null) { return (Xpp3Dom) plugin.getConfiguration(); // depends on control dependency: [if], data = [none] } return null; } }
// Decodes TIFF-style LZW-compressed data into uncompData. Rejects the old LZW
// flavour signalled by a 0x00 0x01 header. Code 256 = "clear" (reset the string
// table and re-read), code 257 = end-of-information. Otherwise standard LZW:
// a code already in the table emits its string and extends the table with
// previous-string + first byte of the emitted string; the not-yet-in-table case
// handles the KwKwK pattern by extending the previous string with its own first
// byte. Statement order is significant (table mutation interleaves with output),
// so the code is left untouched.
// NOTE(review): output goes through writeString(...) to this.uncompData; the
// stream is neither flushed nor closed here — presumably the caller's job.
public class class_name { public void decode(byte data[], OutputStream uncompData) { if(data[0] == (byte)0x00 && data[1] == (byte)0x01) { throw new RuntimeException("LZW flavour not supported."); } initializeStringTable(); this.data = data; this.uncompData = uncompData; // Initialize pointers bytePointer = 0; bitPointer = 0; nextData = 0; nextBits = 0; int code, oldCode = 0; byte string[]; while ((code = getNextCode()) != 257) { if (code == 256) { initializeStringTable(); code = getNextCode(); if (code == 257) { break; } writeString(stringTable[code]); oldCode = code; } else { if (code < tableIndex) { string = stringTable[code]; writeString(string); addStringToTable(stringTable[oldCode], string[0]); oldCode = code; } else { string = stringTable[oldCode]; string = composeString(string, string[0]); writeString(string); addStringToTable(string); oldCode = code; } } } } }
// Control-dependency-annotated copy of the LZW decode method. NOTE(review):
// generated analysis data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { public void decode(byte data[], OutputStream uncompData) { if(data[0] == (byte)0x00 && data[1] == (byte)0x01) { throw new RuntimeException("LZW flavour not supported."); } initializeStringTable(); this.data = data; this.uncompData = uncompData; // Initialize pointers bytePointer = 0; bitPointer = 0; nextData = 0; nextBits = 0; int code, oldCode = 0; byte string[]; while ((code = getNextCode()) != 257) { if (code == 256) { initializeStringTable(); // depends on control dependency: [if], data = [none] code = getNextCode(); // depends on control dependency: [if], data = [none] if (code == 257) { break; } writeString(stringTable[code]); // depends on control dependency: [if], data = [none] oldCode = code; // depends on control dependency: [if], data = [none] } else { if (code < tableIndex) { string = stringTable[code]; // depends on control dependency: [if], data = [none] writeString(string); // depends on control dependency: [if], data = [none] addStringToTable(stringTable[oldCode], string[0]); // depends on control dependency: [if], data = [none] oldCode = code; // depends on control dependency: [if], data = [none] } else { string = stringTable[oldCode]; // depends on control dependency: [if], data = [none] string = composeString(string, string[0]); // depends on control dependency: [if], data = [none] writeString(string); // depends on control dependency: [if], data = [none] addStringToTable(string); // depends on control dependency: [if], data = [none] oldCode = code; // depends on control dependency: [if], data = [none] } } } } }
public class class_name {
    /**
     * Tests whether the given code point belongs to this character class under the
     * supplied encoding. Multi-byte-only encodings (minLength() > 1) are checked
     * with a fixed length of 2; otherwise the encoding reports the code's length.
     *
     * @param enc  encoding used to determine the code's byte length
     * @param code the code point to test
     * @return true if the code is contained in this character class
     */
    public boolean isCodeInCC(Encoding enc, int code) {
        final int length = (enc.minLength() > 1) ? 2 : enc.codeToMbcLength(code);
        return isCodeInCCLength(length, code);
    }
}
// Control-dependency-annotated copy of isCodeInCC. NOTE(review): generated analysis
// data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { public boolean isCodeInCC(Encoding enc, int code) { int len; if (enc.minLength() > 1) { len = 2; // depends on control dependency: [if], data = [none] } else { len = enc.codeToMbcLength(code); // depends on control dependency: [if], data = [none] } return isCodeInCCLength(len, code); } }
// Builds the parent/child Account tree described by an RDF-style document:
// descriptionMap maps id -> Account (RDF:Description nodes) and seqMap maps
// parent id -> ordered child ids (RDF:Seq nodes). Processing starts at the root
// URI and walks the tree via parentIdStack, which — despite its name — is used
// as a FIFO queue (add at end, take from index 0), i.e. breadth-first.
// Parents with no matching description, or whose own parent cannot be resolved
// in the database, are logged as warnings and dropped together with their
// subtree; duplicate children and children without a description are logged
// and skipped. A child that itself has an entry in seqMap is queued for later
// processing and marked as a folder.
// NOTE(review): remove(0) on an ArrayList is O(n) per dequeue — presumably fine
// for small account trees; confirm before using on large inputs.
// Throws Exception when the document lacks the root account URI.
public class class_name { private void createParentChildRelationships(Database db, HashMap<String, Account> descriptionMap, HashMap<String, ArrayList<String>> seqMap) throws Exception { // List of ID's used to avoid recursion ArrayList<String> parentIdStack = new ArrayList<String>(); // Verify the root node exists if (!seqMap.containsKey(Account.ROOT_ACCOUNT_URI)) throw new Exception("File does not contain the root account, '" + Account.ROOT_ACCOUNT_URI + "'"); parentIdStack.add(Account.ROOT_ACCOUNT_URI); // Until we run out of parent nodes... while (parentIdStack.size() > 0) { String parentId = parentIdStack.get(0); Account parentAccount = descriptionMap.get(parentId); parentIdStack.remove(0); // Attempt to add the parent node if it's not the root. Root already exists // in the database by default. if (parentId.compareTo(Account.ROOT_ACCOUNT_URI) != 0) { if (parentAccount != null) { // If the parent node is not already in the db, add it if (db.findAccountById(parentId) == null) { Account parentParentAccount = db.findParent(parentAccount); if (parentParentAccount == null) { logger.warning("SeqNode[" + parentId + "] does not have a parent, will be dropped"); parentAccount = null; } } } else { logger.warning("SeqNode[" + parentId + "] does not have a matching RDF:Description node, it will be dropped"); } } else { parentAccount = db.getRootAccount(); } // Now add the children if (parentAccount != null) { for (String childId : seqMap.get(parentId)) { Account childAccount = descriptionMap.get(childId); if (childAccount != null) { if (!parentAccount.hasChild(childAccount)) { parentAccount.getChildren().add(childAccount); // If the child has children, add it to the parentIdStack for later processing, also mark // it as a folder (which should have been done already based on it not having an algorithm. 
if (seqMap.containsKey(childAccount.getId())) { parentIdStack.add(childId); childAccount.setIsFolder(true); } } else { logger.warning("Duplicate child '" + childId + "' found of parent '" + parentAccount.getId() + "'"); } } else { logger.warning("Cannot find RDF:Description for '" + childId + "', it will be dropped"); } } } } } }
// Control-dependency-annotated copy of createParentChildRelationships. NOTE(review):
// generated analysis data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { private void createParentChildRelationships(Database db, HashMap<String, Account> descriptionMap, HashMap<String, ArrayList<String>> seqMap) throws Exception { // List of ID's used to avoid recursion ArrayList<String> parentIdStack = new ArrayList<String>(); // Verify the root node exists if (!seqMap.containsKey(Account.ROOT_ACCOUNT_URI)) throw new Exception("File does not contain the root account, '" + Account.ROOT_ACCOUNT_URI + "'"); parentIdStack.add(Account.ROOT_ACCOUNT_URI); // Until we run out of parent nodes... while (parentIdStack.size() > 0) { String parentId = parentIdStack.get(0); Account parentAccount = descriptionMap.get(parentId); parentIdStack.remove(0); // Attempt to add the parent node if it's not the root. Root already exists // in the database by default. if (parentId.compareTo(Account.ROOT_ACCOUNT_URI) != 0) { if (parentAccount != null) { // If the parent node is not already in the db, add it if (db.findAccountById(parentId) == null) { Account parentParentAccount = db.findParent(parentAccount); if (parentParentAccount == null) { logger.warning("SeqNode[" + parentId + "] does not have a parent, will be dropped"); // depends on control dependency: [if], data = [none] parentAccount = null; // depends on control dependency: [if], data = [none] } } } else { logger.warning("SeqNode[" + parentId + "] does not have a matching RDF:Description node, it will be dropped"); // depends on control dependency: [if], data = [none] } } else { parentAccount = db.getRootAccount(); } // Now add the children if (parentAccount != null) { for (String childId : seqMap.get(parentId)) { Account childAccount = descriptionMap.get(childId); if (childAccount != null) { if (!parentAccount.hasChild(childAccount)) { parentAccount.getChildren().add(childAccount); // depends on control dependency: [if], data = [none] // If the child has children, add it to the parentIdStack for later processing, also mark // it as a folder (which should have been done 
already based on it not having an algorithm. if (seqMap.containsKey(childAccount.getId())) { parentIdStack.add(childId); // depends on control dependency: [if], data = [none] childAccount.setIsFolder(true); // depends on control dependency: [if], data = [none] } } else { logger.warning("Duplicate child '" + childId + "' found of parent '" + parentAccount.getId() + "'"); // depends on control dependency: [if], data = [none] } } else { logger.warning("Cannot find RDF:Description for '" + childId + "', it will be dropped"); // depends on control dependency: [if], data = [none] } } } } } }
public class class_name {
    /**
     * Lazily resolves the execution entity: when an executionId is known but the
     * execution has not been loaded yet, fetches it via the current command
     * context's execution manager. A no-op otherwise.
     */
    protected void ensureExecutionInitialized() {
        if (execution != null || executionId == null) {
            return; // already loaded, or nothing to load
        }
        execution = Context.getCommandContext()
                           .getExecutionManager()
                           .findExecutionById(executionId);
    }
}
// Control-dependency-annotated copy of ensureExecutionInitialized. NOTE(review):
// generated analysis data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { protected void ensureExecutionInitialized() { if (execution == null && executionId != null) { execution = Context .getCommandContext() .getExecutionManager() .findExecutionById(executionId); // depends on control dependency: [if], data = [none] } } }
// Determines a common value for config key `key` across all given corpora.
// For each corpus its own config is consulted (falling back to DEFAULT_CONFIG
// when the corpus has none); corpora with no config at all are skipped.
// The DEFAULT_CONFIG value is returned whenever a corpus lacks the key, the
// corpora disagree on the value, or no corpus contributed a value; otherwise
// the single agreed-upon value is returned.
// NOTE(review): corpusConfigurations.get(DEFAULT_CONFIG) may itself be null in
// the fallback paths, which would raise a NullPointerException on getConfig —
// confirm that a default config entry is always present.
public class class_name { private String checkSegments(String key, Set<String> corpora, CorpusConfigMap corpusConfigurations) { String segmentation = null; for (String corpus : corpora) { CorpusConfig c = null; if (corpusConfigurations.containsConfig(corpus)) { c = corpusConfigurations.get(corpus); } else { c = corpusConfigurations.get(DEFAULT_CONFIG); } // do nothing if not even default config is set if (c == null) { continue; } String tmpSegment = c.getConfig(key); /** * If no segment is set in the corpus config use always the tok segment. */ if (tmpSegment == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } if (segmentation == null) { segmentation = tmpSegment; continue; } if (!segmentation.equals(tmpSegment)) // return the default config { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } } if (segmentation == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); } else { return segmentation; } } }
// Control-dependency-annotated copy of checkSegments. NOTE(review): generated
// analysis data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { private String checkSegments(String key, Set<String> corpora, CorpusConfigMap corpusConfigurations) { String segmentation = null; for (String corpus : corpora) { CorpusConfig c = null; if (corpusConfigurations.containsConfig(corpus)) { c = corpusConfigurations.get(corpus); // depends on control dependency: [if], data = [none] } else { c = corpusConfigurations.get(DEFAULT_CONFIG); // depends on control dependency: [if], data = [none] } // do nothing if not even default config is set if (c == null) { continue; } String tmpSegment = c.getConfig(key); /** * If no segment is set in the corpus config use always the tok segment. */ if (tmpSegment == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); // depends on control dependency: [if], data = [none] } if (segmentation == null) { segmentation = tmpSegment; // depends on control dependency: [if], data = [none] continue; } if (!segmentation.equals(tmpSegment)) // return the default config { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); // depends on control dependency: [if], data = [none] } } if (segmentation == null) { return corpusConfigurations.get(DEFAULT_CONFIG).getConfig(key); // depends on control dependency: [if], data = [none] } else { return segmentation; // depends on control dependency: [if], data = [none] } } }
// ANTLR-generated parser rule for XRelationalExpression (Xbase grammar with
// annotations): parses an XOtherOperatorExpression followed by zero or more
// tail parts — either an 'instanceof' + JvmTypeReference (building an
// XInstanceOfExpression) or a comparison operator + right operand (building an
// XBinaryOperation), with syntactic predicates deciding between the two.
// GENERATED CODE: do not hand-edit; regenerate from
// InternalXbaseWithAnnotations.g instead. Left byte-identical here.
public class class_name { public final EObject ruleXRelationalExpression() throws RecognitionException { EObject current = null; Token otherlv_2=null; EObject this_XOtherOperatorExpression_0 = null; EObject lv_type_3_0 = null; EObject lv_rightOperand_6_0 = null; enterRule(); try { // InternalXbaseWithAnnotations.g:1194:2: ( (this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* ) ) // InternalXbaseWithAnnotations.g:1195:2: (this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* ) { // InternalXbaseWithAnnotations.g:1195:2: (this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* ) // InternalXbaseWithAnnotations.g:1196:3: this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getXOtherOperatorExpressionParserRuleCall_0()); } pushFollow(FOLLOW_21); this_XOtherOperatorExpression_0=ruleXOtherOperatorExpression(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { 
current = this_XOtherOperatorExpression_0; afterParserOrEnumRuleCall(); } // InternalXbaseWithAnnotations.g:1204:3: ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* loop21: do { int alt21=3; switch ( input.LA(1) ) { case 26: { int LA21_2 = input.LA(2); if ( (synpred12_InternalXbaseWithAnnotations()) ) { alt21=2; } } break; case 27: { int LA21_3 = input.LA(2); if ( (synpred12_InternalXbaseWithAnnotations()) ) { alt21=2; } } break; case 35: { int LA21_4 = input.LA(2); if ( (synpred11_InternalXbaseWithAnnotations()) ) { alt21=1; } } break; case 28: { int LA21_5 = input.LA(2); if ( (synpred12_InternalXbaseWithAnnotations()) ) { alt21=2; } } break; } switch (alt21) { case 1 : // InternalXbaseWithAnnotations.g:1205:4: ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) { // InternalXbaseWithAnnotations.g:1205:4: ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) // InternalXbaseWithAnnotations.g:1206:5: ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) { // InternalXbaseWithAnnotations.g:1206:5: ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) // InternalXbaseWithAnnotations.g:1207:6: ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) { // InternalXbaseWithAnnotations.g:1213:6: ( () otherlv_2= 'instanceof' ) // InternalXbaseWithAnnotations.g:1214:7: () otherlv_2= 'instanceof' { // InternalXbaseWithAnnotations.g:1214:7: () // InternalXbaseWithAnnotations.g:1215:8: { if ( state.backtracking==0 ) { current = forceCreateModelElementAndSet( grammarAccess.getXRelationalExpressionAccess().getXInstanceOfExpressionExpressionAction_1_0_0_0_0(), current); } } otherlv_2=(Token)match(input,35,FOLLOW_22); if 
(state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_2, grammarAccess.getXRelationalExpressionAccess().getInstanceofKeyword_1_0_0_0_1()); } } } // InternalXbaseWithAnnotations.g:1227:5: ( (lv_type_3_0= ruleJvmTypeReference ) ) // InternalXbaseWithAnnotations.g:1228:6: (lv_type_3_0= ruleJvmTypeReference ) { // InternalXbaseWithAnnotations.g:1228:6: (lv_type_3_0= ruleJvmTypeReference ) // InternalXbaseWithAnnotations.g:1229:7: lv_type_3_0= ruleJvmTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getTypeJvmTypeReferenceParserRuleCall_1_0_1_0()); } pushFollow(FOLLOW_21); lv_type_3_0=ruleJvmTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getXRelationalExpressionRule()); } set( current, "type", lv_type_3_0, "org.eclipse.xtext.xbase.Xtype.JvmTypeReference"); afterParserOrEnumRuleCall(); } } } } } break; case 2 : // InternalXbaseWithAnnotations.g:1248:4: ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) { // InternalXbaseWithAnnotations.g:1248:4: ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) // InternalXbaseWithAnnotations.g:1249:5: ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) { // InternalXbaseWithAnnotations.g:1249:5: ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) // InternalXbaseWithAnnotations.g:1250:6: ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) { // InternalXbaseWithAnnotations.g:1260:6: ( () ( ( ruleOpCompare ) ) ) // InternalXbaseWithAnnotations.g:1261:7: () ( ( ruleOpCompare ) ) { // InternalXbaseWithAnnotations.g:1261:7: () // InternalXbaseWithAnnotations.g:1262:8: { if ( 
state.backtracking==0 ) { current = forceCreateModelElementAndSet( grammarAccess.getXRelationalExpressionAccess().getXBinaryOperationLeftOperandAction_1_1_0_0_0(), current); } } // InternalXbaseWithAnnotations.g:1268:7: ( ( ruleOpCompare ) ) // InternalXbaseWithAnnotations.g:1269:8: ( ruleOpCompare ) { // InternalXbaseWithAnnotations.g:1269:8: ( ruleOpCompare ) // InternalXbaseWithAnnotations.g:1270:9: ruleOpCompare { if ( state.backtracking==0 ) { if (current==null) { current = createModelElement(grammarAccess.getXRelationalExpressionRule()); } } if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getFeatureJvmIdentifiableElementCrossReference_1_1_0_0_1_0()); } pushFollow(FOLLOW_9); ruleOpCompare(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { afterParserOrEnumRuleCall(); } } } } } // InternalXbaseWithAnnotations.g:1286:5: ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) // InternalXbaseWithAnnotations.g:1287:6: (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) { // InternalXbaseWithAnnotations.g:1287:6: (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) // InternalXbaseWithAnnotations.g:1288:7: lv_rightOperand_6_0= ruleXOtherOperatorExpression { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getRightOperandXOtherOperatorExpressionParserRuleCall_1_1_1_0()); } pushFollow(FOLLOW_21); lv_rightOperand_6_0=ruleXOtherOperatorExpression(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getXRelationalExpressionRule()); } set( current, "rightOperand", lv_rightOperand_6_0, "org.eclipse.xtext.xbase.Xbase.XOtherOperatorExpression"); afterParserOrEnumRuleCall(); } } } } } break; default : break loop21; } } while (true); } } if ( state.backtracking==0 ) { leaveRule(); } } catch (RecognitionException re) { recover(input,re); 
appendSkippedTokens(); } finally { } return current; } }
// Control-dependency-annotated copy of the ANTLR-generated ruleXRelationalExpression.
// NOTE(review): generated analysis data over generated parser code — keep in sync
// with the unannotated variant; do not hand-edit.
public class class_name { public final EObject ruleXRelationalExpression() throws RecognitionException { EObject current = null; Token otherlv_2=null; EObject this_XOtherOperatorExpression_0 = null; EObject lv_type_3_0 = null; EObject lv_rightOperand_6_0 = null; enterRule(); try { // InternalXbaseWithAnnotations.g:1194:2: ( (this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* ) ) // InternalXbaseWithAnnotations.g:1195:2: (this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* ) { // InternalXbaseWithAnnotations.g:1195:2: (this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* ) // InternalXbaseWithAnnotations.g:1196:3: this_XOtherOperatorExpression_0= ruleXOtherOperatorExpression ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getXOtherOperatorExpressionParserRuleCall_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_21); this_XOtherOperatorExpression_0=ruleXOtherOperatorExpression(); state._fsp--; if 
(state.failed) return current; if ( state.backtracking==0 ) { current = this_XOtherOperatorExpression_0; // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } // InternalXbaseWithAnnotations.g:1204:3: ( ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) | ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) )* loop21: do { int alt21=3; switch ( input.LA(1) ) { case 26: { int LA21_2 = input.LA(2); if ( (synpred12_InternalXbaseWithAnnotations()) ) { alt21=2; // depends on control dependency: [if], data = [none] } } break; case 27: { int LA21_3 = input.LA(2); if ( (synpred12_InternalXbaseWithAnnotations()) ) { alt21=2; // depends on control dependency: [if], data = [none] } } break; case 35: { int LA21_4 = input.LA(2); if ( (synpred11_InternalXbaseWithAnnotations()) ) { alt21=1; // depends on control dependency: [if], data = [none] } } break; case 28: { int LA21_5 = input.LA(2); if ( (synpred12_InternalXbaseWithAnnotations()) ) { alt21=2; // depends on control dependency: [if], data = [none] } } break; } switch (alt21) { case 1 : // InternalXbaseWithAnnotations.g:1205:4: ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) { // InternalXbaseWithAnnotations.g:1205:4: ( ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) ) // InternalXbaseWithAnnotations.g:1206:5: ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) ( (lv_type_3_0= ruleJvmTypeReference ) ) { // InternalXbaseWithAnnotations.g:1206:5: ( ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) ) // InternalXbaseWithAnnotations.g:1207:6: ( ( () 'instanceof' ) )=> ( () otherlv_2= 'instanceof' ) { // InternalXbaseWithAnnotations.g:1213:6: ( () otherlv_2= 'instanceof' ) // 
InternalXbaseWithAnnotations.g:1214:7: () otherlv_2= 'instanceof' { // InternalXbaseWithAnnotations.g:1214:7: () // InternalXbaseWithAnnotations.g:1215:8: { if ( state.backtracking==0 ) { current = forceCreateModelElementAndSet( grammarAccess.getXRelationalExpressionAccess().getXInstanceOfExpressionExpressionAction_1_0_0_0_0(), current); // depends on control dependency: [if], data = [none] } } otherlv_2=(Token)match(input,35,FOLLOW_22); if (state.failed) return current; if ( state.backtracking==0 ) { newLeafNode(otherlv_2, grammarAccess.getXRelationalExpressionAccess().getInstanceofKeyword_1_0_0_0_1()); // depends on control dependency: [if], data = [none] } } } // InternalXbaseWithAnnotations.g:1227:5: ( (lv_type_3_0= ruleJvmTypeReference ) ) // InternalXbaseWithAnnotations.g:1228:6: (lv_type_3_0= ruleJvmTypeReference ) { // InternalXbaseWithAnnotations.g:1228:6: (lv_type_3_0= ruleJvmTypeReference ) // InternalXbaseWithAnnotations.g:1229:7: lv_type_3_0= ruleJvmTypeReference { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getTypeJvmTypeReferenceParserRuleCall_1_0_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_21); lv_type_3_0=ruleJvmTypeReference(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getXRelationalExpressionRule()); // depends on control dependency: [if], data = [none] } set( current, "type", lv_type_3_0, "org.eclipse.xtext.xbase.Xtype.JvmTypeReference"); // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } } } break; case 2 : // InternalXbaseWithAnnotations.g:1248:4: ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) { // InternalXbaseWithAnnotations.g:1248:4: ( ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( 
() ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) ) // InternalXbaseWithAnnotations.g:1249:5: ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) { // InternalXbaseWithAnnotations.g:1249:5: ( ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) ) // InternalXbaseWithAnnotations.g:1250:6: ( ( () ( ( ruleOpCompare ) ) ) )=> ( () ( ( ruleOpCompare ) ) ) { // InternalXbaseWithAnnotations.g:1260:6: ( () ( ( ruleOpCompare ) ) ) // InternalXbaseWithAnnotations.g:1261:7: () ( ( ruleOpCompare ) ) { // InternalXbaseWithAnnotations.g:1261:7: () // InternalXbaseWithAnnotations.g:1262:8: { if ( state.backtracking==0 ) { current = forceCreateModelElementAndSet( grammarAccess.getXRelationalExpressionAccess().getXBinaryOperationLeftOperandAction_1_1_0_0_0(), current); // depends on control dependency: [if], data = [none] } } // InternalXbaseWithAnnotations.g:1268:7: ( ( ruleOpCompare ) ) // InternalXbaseWithAnnotations.g:1269:8: ( ruleOpCompare ) { // InternalXbaseWithAnnotations.g:1269:8: ( ruleOpCompare ) // InternalXbaseWithAnnotations.g:1270:9: ruleOpCompare { if ( state.backtracking==0 ) { if (current==null) { current = createModelElement(grammarAccess.getXRelationalExpressionRule()); // depends on control dependency: [if], data = [none] } } if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getFeatureJvmIdentifiableElementCrossReference_1_1_0_0_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_9); ruleOpCompare(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } } } // InternalXbaseWithAnnotations.g:1286:5: ( (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) ) // InternalXbaseWithAnnotations.g:1287:6: (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) { // 
InternalXbaseWithAnnotations.g:1287:6: (lv_rightOperand_6_0= ruleXOtherOperatorExpression ) // InternalXbaseWithAnnotations.g:1288:7: lv_rightOperand_6_0= ruleXOtherOperatorExpression { if ( state.backtracking==0 ) { newCompositeNode(grammarAccess.getXRelationalExpressionAccess().getRightOperandXOtherOperatorExpressionParserRuleCall_1_1_1_0()); // depends on control dependency: [if], data = [none] } pushFollow(FOLLOW_21); lv_rightOperand_6_0=ruleXOtherOperatorExpression(); state._fsp--; if (state.failed) return current; if ( state.backtracking==0 ) { if (current==null) { current = createModelElementForParent(grammarAccess.getXRelationalExpressionRule()); // depends on control dependency: [if], data = [none] } set( current, "rightOperand", lv_rightOperand_6_0, "org.eclipse.xtext.xbase.Xbase.XOtherOperatorExpression"); // depends on control dependency: [if], data = [none] afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none] } } } } } break; default : break loop21; } } while (true); } } if ( state.backtracking==0 ) { leaveRule(); // depends on control dependency: [if], data = [none] } } catch (RecognitionException re) { recover(input,re); appendSkippedTokens(); } finally { } return current; } }
public class class_name {
    /**
     * Reacts to tile events after delegating to the superclass handler.
     * VISIBILITY toggles the title/text nodes and resizes the web view;
     * LOCATION re-registers the location listener and refreshes the position;
     * TRACK and MAP_PROVIDER forward the tile's new track / map provider.
     *
     * @param EVENT_TYPE the event discriminator string
     */
    @Override protected void handleEvents(final String EVENT_TYPE) {
        super.handleEvents(EVENT_TYPE);
        if ("VISIBILITY".equals(EVENT_TYPE)) {
            Helper.enableNode(titleText, !tile.getTitle().isEmpty());
            final boolean textVisible = tile.isTextVisible();
            Helper.enableNode(text, textVisible);
            // Web view gets more vertical room when the text node is hidden.
            final double contentWidth  = size * 0.9;
            final double contentHeight = textVisible ? size * 0.68 : size * 0.795;
            webView.setMaxSize(contentWidth, contentHeight);
            webView.setPrefSize(contentWidth, contentHeight);
        } else if ("LOCATION".equals(EVENT_TYPE)) {
            tile.getCurrentLocation().addLocationEventListener(locationListener);
            updateLocation();
        } else if ("TRACK".equals(EVENT_TYPE)) {
            addTrack(tile.getTrack());
        } else if ("MAP_PROVIDER".equals(EVENT_TYPE)) {
            changeMapProvider(tile.getMapProvider());
        }
    }
}
// Control-dependency-annotated copy of handleEvents. NOTE(review): generated
// analysis data — keep in sync with the unannotated variant; do not hand-edit.
public class class_name { @Override protected void handleEvents(final String EVENT_TYPE) { super.handleEvents(EVENT_TYPE); if ("VISIBILITY".equals(EVENT_TYPE)) { Helper.enableNode(titleText, !tile.getTitle().isEmpty()); // depends on control dependency: [if], data = [none] Helper.enableNode(text, tile.isTextVisible()); // depends on control dependency: [if], data = [none] webView.setMaxSize(size * 0.9, tile.isTextVisible() ? size * 0.68 : size * 0.795); // depends on control dependency: [if], data = [none] webView.setPrefSize(size * 0.9, tile.isTextVisible() ? size * 0.68 : size * 0.795); // depends on control dependency: [if], data = [none] } else if ("LOCATION".equals(EVENT_TYPE)) { tile.getCurrentLocation().addLocationEventListener(locationListener); // depends on control dependency: [if], data = [none] updateLocation(); // depends on control dependency: [if], data = [none] } else if ("TRACK".equals(EVENT_TYPE)) { addTrack(tile.getTrack()); // depends on control dependency: [if], data = [none] } else if ("MAP_PROVIDER".equals(EVENT_TYPE)) { changeMapProvider(tile.getMapProvider()); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Runs every registered instance's destruction callback (best effort: a
     * failing callback is logged and does not stop the others) and then clears
     * the instance registry.
     *
     * @throws Exception declared for the interface; individual callback
     *         failures are caught and logged rather than propagated
     */
    @Override
    public void destroy() throws Exception {
        for (final InstanceHolder instanceHolder : this.instances.values()) {
            if (instanceHolder.destructionCallback == null) {
                continue; // nothing registered for this bean
            }
            try {
                instanceHolder.destructionCallback.run();
            } catch (Exception e) {
                this.logger.warn(
                    "Destruction callback for bean named '" + instanceHolder.name + "' failed.", e);
            }
        }
        this.instances.clear();
    }
}
// Control-dependency-annotated copy of destroy (markers here also flag the
// try/catch dependencies). NOTE(review): generated analysis data — keep in sync
// with the unannotated variant; do not hand-edit.
public class class_name { @Override public void destroy() throws Exception { for (final InstanceHolder instanceHolder : this.instances.values()) { if (instanceHolder.destructionCallback != null) { try { instanceHolder.destructionCallback.run(); // depends on control dependency: [try], data = [none] } catch (Exception e) { this.logger.warn( "Destruction callback for bean named '" + instanceHolder.name + "' failed.", e); } // depends on control dependency: [catch], data = [none] } } this.instances.clear(); } }
// Prunes a redundant top-level ConstructionNode from `child`: normalizes the
// node's ascending substitution against projectedVariables and, when the
// proposed replacement construction node is syntactically equivalent to the
// existing one, returns the tree unchanged. Otherwise the grandchild is
// normalized and the tree is rebuilt — re-adding a construction node (marked
// lifted via currentIQProperties) only when the normalization still proposes
// one; otherwise the bare grandchild is returned. Trees whose root is not a
// ConstructionNode are returned untouched.
public class class_name { private IQTree projectAwayUnnecessaryVariables(IQTree child, IQProperties currentIQProperties) { if (child.getRootNode() instanceof ConstructionNode) { ConstructionNode constructionNode = (ConstructionNode) child.getRootNode(); AscendingSubstitutionNormalization normalization = normalizeAscendingSubstitution( constructionNode.getSubstitution(), projectedVariables); Optional<ConstructionNode> proposedConstructionNode = normalization.generateTopConstructionNode(); if (proposedConstructionNode .filter(c -> c.isSyntacticallyEquivalentTo(constructionNode)) .isPresent()) return child; IQTree grandChild = normalization.normalizeChild(((UnaryIQTree) child).getChild()); return proposedConstructionNode .map(c -> (IQTree) iqFactory.createUnaryIQTree(c, grandChild, currentIQProperties.declareLifted())) .orElse(grandChild); } else return child; } }
// Control-dependency-annotated copy of projectAwayUnnecessaryVariables.
// NOTE(review): generated analysis data — keep in sync with the unannotated
// variant; do not hand-edit.
public class class_name { private IQTree projectAwayUnnecessaryVariables(IQTree child, IQProperties currentIQProperties) { if (child.getRootNode() instanceof ConstructionNode) { ConstructionNode constructionNode = (ConstructionNode) child.getRootNode(); AscendingSubstitutionNormalization normalization = normalizeAscendingSubstitution( constructionNode.getSubstitution(), projectedVariables); Optional<ConstructionNode> proposedConstructionNode = normalization.generateTopConstructionNode(); if (proposedConstructionNode .filter(c -> c.isSyntacticallyEquivalentTo(constructionNode)) .isPresent()) return child; IQTree grandChild = normalization.normalizeChild(((UnaryIQTree) child).getChild()); return proposedConstructionNode .map(c -> (IQTree) iqFactory.createUnaryIQTree(c, grandChild, currentIQProperties.declareLifted())) .orElse(grandChild); // depends on control dependency: [if], data = [none] } else return child; } }