code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name {
    /**
     * Ensures zone information for {@code index} is present in the cache.
     * On a cache miss the zone JSON is fetched synchronously, parsed, and stored.
     *
     * @param index zone key to look up; a null index is reported as failure
     * @return true when zone info is cached (already or after fetching),
     *         false when the index is null or the JSON could not be parsed
     */
    boolean preQueryIndex(final ZoneIndex index) {
        if (index == null) {
            return false;
        }
        if (zones.get(index) != null) {
            // Already cached — nothing to fetch.
            return true;
        }
        try {
            final ResponseInfo responseInfo = getZoneJsonSync(index);
            zones.put(index, ZoneInfo.buildFromJson(responseInfo.response));
            return true;
        } catch (JSONException e) {
            // NOTE(review): failure is only printed, not logged/propagated — consider
            // routing through the project's logger; kept as-is to preserve behavior.
            e.printStackTrace();
            return false;
        }
    }
}
public class class_name { boolean preQueryIndex(final ZoneIndex index) { boolean success = false; if (index != null) { ZoneInfo info = zones.get(index); if (info != null) { success = true; // depends on control dependency: [if], data = [none] } else { try { ResponseInfo responseInfo = getZoneJsonSync(index); ZoneInfo info2 = ZoneInfo.buildFromJson(responseInfo.response); zones.put(index, info2); // depends on control dependency: [try], data = [none] success = true; // depends on control dependency: [try], data = [none] } catch (JSONException e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } } return success; } }
public class class_name {
    /**
     * Builds a logical-AND operand over the two sub-expressions.
     *
     * @param leftExpression  left operand; null short-circuits the combination
     * @param rightExpression right operand; null short-circuits the combination
     * @return an AND operand over both expressions, or null when either side is null
     */
    @Override
    protected Operand createAndExpression(final Operand leftExpression, final Operand rightExpression) {
        if (leftExpression != null && rightExpression != null) {
            final Set<Operand> operands = new HashSet<Operand>();
            operands.add(leftExpression);
            operands.add(rightExpression);
            return new Operand(Operator.AND, operands);
        }
        return null;
    }
}
public class class_name { @Override protected Operand createAndExpression(final Operand leftExpression, final Operand rightExpression) { if (leftExpression == null || rightExpression == null) { return null; // depends on control dependency: [if], data = [none] } final Set<Operand> operands = new HashSet<Operand>(); operands.add(leftExpression); operands.add(rightExpression); return new Operand(Operator.AND, operands); } }
public class class_name {
    /**
     * Serializes this parameter object to its JSON representation.
     *
     * @return the JSON string for this instance
     * @throws SnapshotDataException when serialization fails; the message embeds
     *         the underlying exception
     */
    public String serialize() {
        JaxbJsonSerializer<CreateSnapshotBridgeParameters> serializer =
            new JaxbJsonSerializer<>(CreateSnapshotBridgeParameters.class);
        try {
            return serializer.serialize(this);
        } catch (IOException e) {
            // FIX: e.getMessage() can be null and drops the exception type entirely;
            // embedding the exception itself keeps both the type and the message.
            // TODO(review): chain the cause instead, if SnapshotDataException
            // offers a (String, Throwable) constructor — confirm against its API.
            throw new SnapshotDataException(
                "Unable to create task result due to: " + e);
        }
    }
}
public class class_name { public String serialize() { JaxbJsonSerializer<CreateSnapshotBridgeParameters> serializer = new JaxbJsonSerializer<>(CreateSnapshotBridgeParameters.class); try { return serializer.serialize(this); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new SnapshotDataException( "Unable to create task result due to: " + e.getMessage()); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public OperationResult operate(TestStep testStep) { String testStepName = testStep.getLocator().getValue(); LogRecord log = LogRecord.info(LOG, testStep, "script.execute", testStepName); current.backup(); TestScript testScript = dao.load(new File(pm.getPageScriptDir(), testStepName), sheetName, false); current.setTestScript(testScript); current.reset(); current.setCurrentIndex(current.getCurrentIndex() - 1); String caseNo = testStep.getValue(); if (testScript.containsCaseNo(caseNo)) { current.setCaseNo(caseNo); } else { String msg = MessageManager.getMessage("case.number.error", caseNo) + testScript.getCaseNoMap().keySet(); throw new TestException(msg); } current.setTestContextListener(this); return new OperationResult(log); } }
public class class_name { @Override public OperationResult operate(TestStep testStep) { String testStepName = testStep.getLocator().getValue(); LogRecord log = LogRecord.info(LOG, testStep, "script.execute", testStepName); current.backup(); TestScript testScript = dao.load(new File(pm.getPageScriptDir(), testStepName), sheetName, false); current.setTestScript(testScript); current.reset(); current.setCurrentIndex(current.getCurrentIndex() - 1); String caseNo = testStep.getValue(); if (testScript.containsCaseNo(caseNo)) { current.setCaseNo(caseNo); // depends on control dependency: [if], data = [none] } else { String msg = MessageManager.getMessage("case.number.error", caseNo) + testScript.getCaseNoMap().keySet(); throw new TestException(msg); } current.setTestContextListener(this); return new OperationResult(log); } }
public class class_name {
    /**
     * Picks visible objects in the scene and dispatches pick events.
     * When a projection (mProjection) is set, each hit's bounding sphere is
     * transformed into the picker's local space and tested against mCuller;
     * hits outside the volume are removed from the result array before
     * events are generated.
     */
    public void doPick() {
        GVRSceneObject owner = getOwnerObject();
        GVRPickedObject[] picked = pickVisible(mScene);
        if (mProjection != null) {
            Matrix4f view_matrix;
            // Use the owner's transform when this picker is attached to an object;
            // otherwise fall back to the main camera rig's head transform.
            if (owner != null) {
                view_matrix = owner.getTransform().getModelMatrix4f();
            } else {
                view_matrix = mScene.getMainCameraRig().getHeadTransform().getModelMatrix4f();
            }
            view_matrix.invert(); // world space -> picker-local space
            for (int i = 0; i < picked.length; ++i) {
                GVRPickedObject hit = picked[i];
                if (hit != null) {
                    GVRSceneObject sceneObj = hit.hitObject;
                    GVRSceneObject.BoundingVolume bv = sceneObj.getBoundingVolume();
                    // Transform the sphere center and one point on its surface
                    // (center + radius along z); their distance after the transform
                    // recovers the radius under any scaling in the matrix.
                    Vector4f center = new Vector4f(bv.center.x, bv.center.y, bv.center.z, 1);
                    Vector4f p = new Vector4f(bv.center.x, bv.center.y, bv.center.z + bv.radius, 1);
                    float radius;
                    center.mul(view_matrix);
                    p.mul(view_matrix);
                    p.sub(center, p);
                    p.w = 0; // p is now a direction vector, not a point
                    radius = p.length();
                    if (!mCuller.testSphere(center.x, center.y, center.z, radius)) {
                        picked[i] = null; // culled: sphere lies outside the projection volume
                    }
                }
            }
        }
        generatePickEvents(picked);
    }
}
public class class_name { public void doPick() { GVRSceneObject owner = getOwnerObject(); GVRPickedObject[] picked = pickVisible(mScene); if (mProjection != null) { Matrix4f view_matrix; if (owner != null) { view_matrix = owner.getTransform().getModelMatrix4f(); // depends on control dependency: [if], data = [none] } else { view_matrix = mScene.getMainCameraRig().getHeadTransform().getModelMatrix4f(); // depends on control dependency: [if], data = [none] } view_matrix.invert(); // depends on control dependency: [if], data = [none] for (int i = 0; i < picked.length; ++i) { GVRPickedObject hit = picked[i]; if (hit != null) { GVRSceneObject sceneObj = hit.hitObject; GVRSceneObject.BoundingVolume bv = sceneObj.getBoundingVolume(); Vector4f center = new Vector4f(bv.center.x, bv.center.y, bv.center.z, 1); Vector4f p = new Vector4f(bv.center.x, bv.center.y, bv.center.z + bv.radius, 1); float radius; center.mul(view_matrix); // depends on control dependency: [if], data = [none] p.mul(view_matrix); // depends on control dependency: [if], data = [none] p.sub(center, p); // depends on control dependency: [if], data = [none] p.w = 0; // depends on control dependency: [if], data = [none] radius = p.length(); // depends on control dependency: [if], data = [none] if (!mCuller.testSphere(center.x, center.y, center.z, radius)) { picked[i] = null; // depends on control dependency: [if], data = [none] } } } } generatePickEvents(picked); } }
public class class_name {
    /**
     * Installs (or clears, when null) the Writer this body content delegates to.
     * A non-null writer forces unbuffered behavior (bufferSize = 0) per the JSP
     * spec for JspContext.pushBody(Writer); the previous size is saved so it can
     * be restored when the writer is removed again. A previously-closed instance
     * is reopened first so it can be reused (PM12137).
     */
    void setWriter(Writer writer) {
        // PM12137 - starts
        if (closed) {
            if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) {
                logger.logp(Level.FINE, CLASS_NAME, "setWriter", "resetting closed to false for this=[" + this + "]");
            }
            // Reopen for reuse and discard the old buffer contents.
            closed = false;
            strBuffer = new StringBuffer(this.bodyContentBuffSize);
        }
        // PM12137 - ends
        this.writer = writer;
        if (writer != null) {
            // According to the spec, the JspWriter returned by
            // JspContext.pushBody(java.io.Writer writer) must behave as
            // though it were unbuffered. This means that its getBufferSize()
            // must always return 0. The implementation of
            // JspWriter.getBufferSize() returns the value of JspWriter's
            // 'bufferSize' field, which is inherited by this class.
            // Therefore, we simply save the current 'bufferSize' (so we can
            // later restore it should this BodyContentImpl ever be reused by
            // a call to PageContext.pushBody()) before setting it to 0.
            if (bufferSize != 0) {
                if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) {
                    logger.logp(Level.FINE, CLASS_NAME, "setWriter", "BodyContentImpl setWriter A. bufferSize=[" + bufferSize + "] this=[" + this + "]");
                }
                bufferSizeSave = bufferSize;
                bufferSize = 0;
            }
        } else {
            // Writer removed: restore the saved buffer size and clear the body.
            bufferSize = bufferSizeSave;
            if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINE)) {
                logger.logp(Level.FINE, CLASS_NAME, "setWriter", "BodyContentImpl setWriter B. bufferSize=[" + bufferSize + "] this=[" + this + "]");
            }
            clearBody();
        }
    }
}
public class class_name { void setWriter(Writer writer) { // PM12137 - starts if (closed) { if(com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable(Level.FINE)){ logger.logp(Level.FINE, CLASS_NAME, "setWriter", "resetting closed to false for this=["+this+"]"); // depends on control dependency: [if], data = [none] } closed = false; // depends on control dependency: [if], data = [none] strBuffer = new StringBuffer(this.bodyContentBuffSize); // depends on control dependency: [if], data = [none] } // PM12137 - ends this.writer = writer; if (writer != null) { // According to the spec, the JspWriter returned by // JspContext.pushBody(java.io.Writer writer) must behave as // though it were unbuffered. This means that its getBufferSize() // must always return 0. The implementation of // JspWriter.getBufferSize() returns the value of JspWriter's // 'bufferSize' field, which is inherited by this class. // Therefore, we simply save the current 'bufferSize' (so we can // later restore it should this BodyContentImpl ever be reused by // a call to PageContext.pushBody()) before setting it to 0. if (bufferSize != 0) { if(com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable(Level.FINE)){ logger.logp(Level.FINE, CLASS_NAME, "setWriter", "BodyContentImpl setWriter A. bufferSize=["+bufferSize+"] this=["+this+"]"); // depends on control dependency: [if], data = [none] } bufferSizeSave = bufferSize; // depends on control dependency: [if], data = [none] bufferSize = 0; // depends on control dependency: [if], data = [none] } } else { bufferSize = bufferSizeSave; // depends on control dependency: [if], data = [none] if(com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable(Level.FINE)){ logger.logp(Level.FINE, CLASS_NAME, "setWriter", "BodyContentImpl setWriter B. 
bufferSize=["+bufferSize+"] this=["+this+"]"); // depends on control dependency: [if], data = [none] } clearBody(); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Registers the service's actions. If the service class carries a
     * {@code @Dependencies} annotation with one or more service names,
     * registration is deferred until all of those services are available
     * (via waitForServices), then performed in the promise callback;
     * otherwise the actions are registered immediately.
     * Spaces in the service name are replaced with dashes.
     */
    @Override
    public void addActions(String serviceName, Service service) {
        // Resolve the effective name: fall back to the service's own name,
        // or push the explicit name into the service instance.
        if (serviceName == null || serviceName.isEmpty()) {
            serviceName = service.getName();
        } else {
            service.name = serviceName;
        }
        final String name = serviceName.replace(' ', '-');
        Class<? extends Service> clazz = service.getClass();
        Dependencies dependencies = clazz.getAnnotation(Dependencies.class);
        if (dependencies != null) {
            String[] services = dependencies.value();
            if (services != null && services.length > 0) {
                // Defer registration until every named dependency is online.
                waitForServices(0, Arrays.asList(services)).then(ok -> {
                    // Build a human-readable startup message listing the dependencies.
                    StringBuilder msg = new StringBuilder(64);
                    msg.append("Starting \"");
                    msg.append(name);
                    msg.append("\" service because ");
                    for (int i = 0; i < services.length; i++) {
                        msg.append('\"');
                        msg.append(services[i]);
                        msg.append('\"');
                        if (i < services.length - 1) {
                            msg.append(", ");
                        }
                    }
                    if (services.length == 1) {
                        msg.append(" service is");
                    } else {
                        msg.append(" services are");
                    }
                    msg.append(" available...");
                    logger.info(msg.toString());
                    addOnlineActions(name, service);
                }).catchError(cause -> {
                    logger.error("Unable to deploy service!", cause);
                });
                // Registration happens asynchronously in the callback above.
                return;
            }
        }
        // No dependencies declared: register right away.
        addOnlineActions(name, service);
    }
}
public class class_name { @Override public void addActions(String serviceName, Service service) { if (serviceName == null || serviceName.isEmpty()) { serviceName = service.getName(); // depends on control dependency: [if], data = [none] } else { service.name = serviceName; // depends on control dependency: [if], data = [none] } final String name = serviceName.replace(' ', '-'); Class<? extends Service> clazz = service.getClass(); Dependencies dependencies = clazz.getAnnotation(Dependencies.class); if (dependencies != null) { String[] services = dependencies.value(); if (services != null && services.length > 0) { waitForServices(0, Arrays.asList(services)).then(ok -> { StringBuilder msg = new StringBuilder(64); // depends on control dependency: [if], data = [(services] msg.append("Starting \""); // depends on control dependency: [if], data = [none] msg.append(name); // depends on control dependency: [if], data = [none] msg.append("\" service because "); // depends on control dependency: [if], data = [none] for (int i = 0; i < services.length; i++) { msg.append('\"'); // depends on control dependency: [for], data = [none] msg.append(services[i]); // depends on control dependency: [for], data = [i] msg.append('\"'); // depends on control dependency: [for], data = [none] if (i < services.length - 1) { msg.append(", "); // depends on control dependency: [if], data = [none] } } if (services.length == 1) { msg.append(" service is"); // depends on control dependency: [if], data = [none] } else { msg.append(" services are"); // depends on control dependency: [if], data = [none] } msg.append(" available..."); // depends on control dependency: [if], data = [none] logger.info(msg.toString()); // depends on control dependency: [if], data = [none] addOnlineActions(name, service); // depends on control dependency: [if], data = [none] }).catchError(cause -> { logger.error("Unable to deploy service!", cause); }); return; // depends on control dependency: [if], data = [none] } } 
addOnlineActions(name, service); } }
public class class_name {
    /**
     * Builds a CommandLine for the given executable path.
     * On Windows the command runs through {@code cmd /c} and forward slashes
     * in the path are converted to backslashes; on other platforms the
     * executable is invoked directly.
     *
     * @param executable executable path, possibly with forward slashes
     * @return the platform-appropriate command line
     */
    public static CommandLine buildCommandLine(String executable) {
        if (!SystemUtils.IS_OS_WINDOWS) {
            return new CommandLine(executable);
        }
        final String windowsExecutable = executable.replace('/', '\\');
        return new CommandLine("cmd")
            .addArgument("/c")
            .addArgument(windowsExecutable);
    }
}
public class class_name { public static CommandLine buildCommandLine(String executable) { CommandLine cmd; if (SystemUtils.IS_OS_WINDOWS) { String windowsExecutable = executable.replace('/', '\\'); cmd = new CommandLine("cmd") .addArgument("/c") .addArgument(windowsExecutable); // depends on control dependency: [if], data = [none] } else { cmd = new CommandLine(executable); // depends on control dependency: [if], data = [none] } return cmd; } }
public class class_name {
    /**
     * Propagates variable fix-up to every part of this AVT, if any exist.
     *
     * @param vars        vector of variable QNames in scope
     * @param globalsSize number of global variables in the stack frame
     */
    public void fixupVariables(java.util.Vector vars, int globalsSize) {
        if (m_parts == null) {
            return;
        }
        final int count = m_parts.size();
        for (int idx = 0; idx < count; idx++) {
            ((AVTPart) m_parts.elementAt(idx)).fixupVariables(vars, globalsSize);
        }
    }
}
public class class_name { public void fixupVariables(java.util.Vector vars, int globalsSize) { if (null != m_parts) { int n = m_parts.size(); for (int i = 0; i < n; i++) { AVTPart part = (AVTPart) m_parts.elementAt(i); part.fixupVariables(vars, globalsSize); // depends on control dependency: [for], data = [none] } } } }
public class class_name {
    /**
     * Determines the displayed year for the given historic date: scans the
     * ordered strategies for the first whose AD interval
     * [previous lastAnnoDomini, strategy.lastAnnoDomini) contains the date's
     * AD year, delegating to that strategy's rule; falls back to the final
     * rule when no interval matches.
     */
    int displayedYear(HistoricDate date) {
        final int annoDomini = date.getEra().annoDomini(date.getYearOfEra());
        int lowerBound = Integer.MIN_VALUE;
        final int total = this.strategies.size();
        for (int idx = 0; idx < total; idx++) {
            final NewYearStrategy strategy = this.strategies.get(idx);
            if (annoDomini >= lowerBound && annoDomini < strategy.lastAnnoDomini) {
                return strategy.lastRule.displayedYear(this, date);
            }
            lowerBound = strategy.lastAnnoDomini;
        }
        return this.lastRule.displayedYear(this, date);
    }
}
public class class_name { int displayedYear(HistoricDate date) { HistoricEra era = date.getEra(); int yearOfEra = date.getYearOfEra(); int annoDomini = era.annoDomini(yearOfEra); int previous = Integer.MIN_VALUE; for (int i = 0, n = this.strategies.size(); i < n; i++) { NewYearStrategy strategy = this.strategies.get(i); if ((annoDomini >= previous) && (annoDomini < strategy.lastAnnoDomini)) { return strategy.lastRule.displayedYear(this, date); // depends on control dependency: [if], data = [none] } previous = strategy.lastAnnoDomini; // depends on control dependency: [for], data = [none] } return this.lastRule.displayedYear(this, date); } }
public class class_name {
    /**
     * ANTLR-generated rule: parses a grand filter of the form
     * {@code semiGrandFilter (WS (AND|OR) WS semiGrandFilter)*}, left-folding
     * each pair into a new composite Filter whose operator is the lowercased
     * AND/OR token text.
     * NOTE(review): generated parser code — only comments added here.
     */
    public final Filter grandFilter() throws RecognitionException {
        Filter filter = null;
        Token o = null;
        Filter a = null;
        Filter b = null;
        try {
            // druidG.g:387:2: (a= semiGrandFilter ( WS o= ( AND | OR ) WS b= semiGrandFilter )* )
            // druidG.g:387:3: a= semiGrandFilter ( WS o= ( AND | OR ) WS b= semiGrandFilter )*
            {
                pushFollow(FOLLOW_semiGrandFilter_in_grandFilter2751);
                a = semiGrandFilter();
                state._fsp--;
                filter = a;
                // druidG.g:387:35: ( WS o= ( AND | OR ) WS b= semiGrandFilter )*
                loop183:
                while (true) {
                    int alt183 = 2;
                    int LA183_0 = input.LA(1);
                    if ((LA183_0 == WS)) {
                        int LA183_1 = input.LA(2);
                        if ((LA183_1 == AND || LA183_1 == OR)) {
                            alt183 = 1; // lookahead matched "WS (AND|OR)": take another iteration
                        }
                    }
                    switch (alt183) {
                        case 1 :
                            // druidG.g:387:36: WS o= ( AND | OR ) WS b= semiGrandFilter
                            {
                                match(input, WS, FOLLOW_WS_in_grandFilter2756);
                                o = input.LT(1);
                                if (input.LA(1) == AND || input.LA(1) == OR) {
                                    input.consume();
                                    state.errorRecovery = false;
                                } else {
                                    MismatchedSetException mse = new MismatchedSetException(null, input);
                                    throw mse;
                                }
                                match(input, WS, FOLLOW_WS_in_grandFilter2766);
                                pushFollow(FOLLOW_semiGrandFilter_in_grandFilter2770);
                                b = semiGrandFilter();
                                state._fsp--;
                                // Left-fold: the filter built so far becomes the first field
                                // of a new AND/OR composite filter.
                                Filter tmpFilter = filter;
                                filter = new Filter((o != null ? o.getText() : null).toLowerCase());
                                filter.fields = new ArrayList<>();
                                filter.fields.add(tmpFilter);
                                if (b != null) {
                                    filter.fields.add(b);
                                }
                            }
                            break;
                        default :
                            break loop183; // no further WS (AND|OR) pairs follow
                    }
                }
            }
        } catch (RecognitionException re) {
            reportError(re);
            recover(input, re);
        } finally {
            // do for sure before leaving
        }
        return filter;
    }
}
public class class_name { public final Filter grandFilter() throws RecognitionException { Filter filter = null; Token o=null; Filter a =null; Filter b =null; try { // druidG.g:387:2: (a= semiGrandFilter ( WS o= ( AND | OR ) WS b= semiGrandFilter )* ) // druidG.g:387:3: a= semiGrandFilter ( WS o= ( AND | OR ) WS b= semiGrandFilter )* { pushFollow(FOLLOW_semiGrandFilter_in_grandFilter2751); a=semiGrandFilter(); state._fsp--; filter = a; // druidG.g:387:35: ( WS o= ( AND | OR ) WS b= semiGrandFilter )* loop183: while (true) { int alt183=2; int LA183_0 = input.LA(1); if ( (LA183_0==WS) ) { int LA183_1 = input.LA(2); if ( (LA183_1==AND||LA183_1==OR) ) { alt183=1; // depends on control dependency: [if], data = [none] } } switch (alt183) { case 1 : // druidG.g:387:36: WS o= ( AND | OR ) WS b= semiGrandFilter { match(input,WS,FOLLOW_WS_in_grandFilter2756); o=input.LT(1); if ( input.LA(1)==AND||input.LA(1)==OR ) { input.consume(); // depends on control dependency: [if], data = [none] state.errorRecovery=false; // depends on control dependency: [if], data = [none] } else { MismatchedSetException mse = new MismatchedSetException(null,input); throw mse; } match(input,WS,FOLLOW_WS_in_grandFilter2766); pushFollow(FOLLOW_semiGrandFilter_in_grandFilter2770); b=semiGrandFilter(); state._fsp--; Filter tmpFilter = filter; filter = new Filter((o!=null?o.getText():null).toLowerCase()); filter.fields = new ArrayList<>(); filter.fields.add(tmpFilter); if (b != null) { filter.fields.add(b); // depends on control dependency: [if], data = [(b] } } break; default : break loop183; } } } } catch (RecognitionException re) { reportError(re); recover(input,re); } finally { // do for sure before leaving } return filter; } }
public class class_name {
    /**
     * Reads eight bytes starting at {@code off} as a long in the requested
     * byte order, composed from two 32-bit halves treated as unsigned.
     *
     * @param b            source byte array
     * @param off          index of the first of the eight bytes
     * @param littleEndian true for little-endian interpretation, false for big-endian
     * @return the decoded 64-bit value
     */
    public static long toLong(byte[] b, int off, boolean littleEndian) {
        final long low;
        final long high;
        if (littleEndian) {
            low = toInt(b, off, LITTLE_ENDIAN) & 0xFFFFFFFFL;
            high = toInt(b, off + 4, LITTLE_ENDIAN) & 0xFFFFFFFFL;
        } else {
            high = toInt(b, off, BIG_ENDIAN) & 0xFFFFFFFFL;
            low = toInt(b, off + 4, BIG_ENDIAN) & 0xFFFFFFFFL;
        }
        return (high << 32) | low;
    }
}
public class class_name { public static long toLong(byte[] b, int off, boolean littleEndian) { if (littleEndian) { return (toInt(b, off, LITTLE_ENDIAN) & 0xFFFFFFFFL) | ((toInt(b, off + 4, LITTLE_ENDIAN) & 0xFFFFFFFFL) << 32); // depends on control dependency: [if], data = [none] } return ((toInt(b, off, BIG_ENDIAN) & 0xFFFFFFFFL) << 32) | (toInt(b, off + 4, BIG_ENDIAN) & 0xFFFFFFFFL); } }
public class class_name { public static String getCssCode(BandElement be, Map<String, Object> style, boolean needed) { StringBuilder css = new StringBuilder(); if (needed) { if (style.containsKey(StyleFormatConstants.FONT_FAMILY_KEY)) { String val = (String) style.get(StyleFormatConstants.FONT_FAMILY_KEY); css.append(" font-family: ").append(val).append(" ;\n"); } if (style.containsKey(StyleFormatConstants.FONT_SIZE)) { Float val = (Float) style.get(StyleFormatConstants.FONT_SIZE); css.append("font-size: ").append(val.intValue()).append("pt ;\n"); } } if (style.containsKey(StyleFormatConstants.FONT_COLOR)) { Color val = (Color) style.get(StyleFormatConstants.FONT_COLOR); css.append("color: rgb(").append(val.getRed()).append(",").append(val.getGreen()).append(",").append(val.getBlue()) .append(") ;\n"); } if (style.containsKey(StyleFormatConstants.FONT_STYLE_KEY)) { if (StyleFormatConstants.FONT_STYLE_NORMAL.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: normal ;\n"); css.append("font-style: normal; \n"); } if (StyleFormatConstants.FONT_STYLE_BOLD.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: bold; \n"); css.append("font-style: normal; \n"); } if (StyleFormatConstants.FONT_STYLE_ITALIC.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: normal; \n"); css.append("font-style: italic; \n"); } if (StyleFormatConstants.FONT_STYLE_BOLDITALIC.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: bold; \n"); css.append("font-style: italic; \n"); } } if (style.containsKey(StyleFormatConstants.BACKGROUND_COLOR)) { Color val = (Color) style.get(StyleFormatConstants.BACKGROUND_COLOR); if ((val.getRed() != 255) || (val.getGreen() != 255) || (val.getBlue() != 255)) { css.append("background-color: rgb(").append(val.getRed()).append(",").append(val.getGreen()).append(",") .append(val.getBlue()).append(") ;\n"); } } if 
(style.containsKey(StyleFormatConstants.HORIZONTAL_ALIGN_KEY)) { if (StyleFormatConstants.HORIZONTAL_ALIGN_LEFT.equals(style.get(StyleFormatConstants.HORIZONTAL_ALIGN_KEY))) { css.append("text-align:left; \n"); } if (StyleFormatConstants.HORIZONTAL_ALIGN_RIGHT.equals(style.get(StyleFormatConstants.HORIZONTAL_ALIGN_KEY))) { css.append("text-align:right; \n"); } if (StyleFormatConstants.HORIZONTAL_ALIGN_CENTER.equals(style.get(StyleFormatConstants.HORIZONTAL_ALIGN_KEY))) { css.append("text-align:center; \n"); } } if (style.containsKey(StyleFormatConstants.VERTICAL_ALIGN_KEY)) { if (StyleFormatConstants.VERTICAL_ALIGN_MIDDLE.equals(style.get(StyleFormatConstants.VERTICAL_ALIGN_KEY))) { css.append("vertical-align:middle; \n"); } if (StyleFormatConstants.VERTICAL_ALIGN_TOP.equals(style.get(StyleFormatConstants.VERTICAL_ALIGN_KEY))) { css.append("vertical-align:top; \n"); } if (StyleFormatConstants.VERTICAL_ALIGN_BOTTOM.equals(style.get(StyleFormatConstants.VERTICAL_ALIGN_KEY))) { css.append("vertical-align:bottom; \n"); } } if (style.containsKey(StyleFormatConstants.PADDING_LEFT)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_LEFT); css.append("padding-left:").append(val).append("pt; \n"); } if (style.containsKey(StyleFormatConstants.PADDING_RIGHT)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_RIGHT); css.append("padding-right:").append(val).append("pt; \n"); } if (style.containsKey(StyleFormatConstants.PADDING_TOP)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_TOP); css.append("padding-top:").append(val).append("pt; \n"); } if (style.containsKey(StyleFormatConstants.PADDING_BOTTOM)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_BOTTOM); css.append("padding-bottom:").append(val).append("px; \n"); } if (needed) { if (style.containsKey(StyleFormatConstants.BORDER_LEFT)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_LEFT); css.append("border-left:").append(val.intValue()).append("px; \n"); 
css.append("border-left-style:solid; \n"); Color color = (Color) style.get(StyleFormatConstants.BORDER_LEFT_COLOR); css.append("border-left-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); } else { css.append("border-left: none; \n"); } if (style.containsKey(StyleFormatConstants.BORDER_RIGHT)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_RIGHT); css.append("border-right:").append(val.intValue()).append("px; \n"); css.append("border-right-style:solid; \n"); Color color = (Color) style.get(StyleFormatConstants.BORDER_RIGHT_COLOR); css.append("border-right-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); } else { css.append("border-right: none; \n"); } if (style.containsKey(StyleFormatConstants.BORDER_TOP)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_TOP); css.append("border-top:").append(val.intValue()).append("px; \n"); css.append("border-top-style:solid; \n"); Color color = (Color) style.get(StyleFormatConstants.BORDER_TOP_COLOR); css.append("border-top-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); } else { css.append("border-top: none; \n"); } if (style.containsKey(StyleFormatConstants.BORDER_BOTTOM)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_BOTTOM); css.append("border-bottom:").append(val.intValue()).append("px; \n"); css.append("border-bottom-style:solid; \n"); Color color = (Color) style.get(StyleFormatConstants.BORDER_BOTTOM_COLOR); css.append("border-bottom-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); } else { css.append("border-bottom: none; \n"); } } if (be != null) { if (!be.isWrapText()) { css.append("white-space: nowrap; \n"); } else { css.append("word-wrap: break-word; \n"); css.append("line-height: " + be.getPercentLineSpacing() + "%; \n"); } // if (be.getTextRotation() != 0) { // css.append(getRotationStyle(be.getTextRotation())); // } } return css.toString(); } 
}
public class class_name { public static String getCssCode(BandElement be, Map<String, Object> style, boolean needed) { StringBuilder css = new StringBuilder(); if (needed) { if (style.containsKey(StyleFormatConstants.FONT_FAMILY_KEY)) { String val = (String) style.get(StyleFormatConstants.FONT_FAMILY_KEY); css.append(" font-family: ").append(val).append(" ;\n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.FONT_SIZE)) { Float val = (Float) style.get(StyleFormatConstants.FONT_SIZE); css.append("font-size: ").append(val.intValue()).append("pt ;\n"); // depends on control dependency: [if], data = [none] } } if (style.containsKey(StyleFormatConstants.FONT_COLOR)) { Color val = (Color) style.get(StyleFormatConstants.FONT_COLOR); css.append("color: rgb(").append(val.getRed()).append(",").append(val.getGreen()).append(",").append(val.getBlue()) .append(") ;\n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.FONT_STYLE_KEY)) { if (StyleFormatConstants.FONT_STYLE_NORMAL.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: normal ;\n"); // depends on control dependency: [if], data = [none] css.append("font-style: normal; \n"); // depends on control dependency: [if], data = [none] } if (StyleFormatConstants.FONT_STYLE_BOLD.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: bold; \n"); // depends on control dependency: [if], data = [none] css.append("font-style: normal; \n"); // depends on control dependency: [if], data = [none] } if (StyleFormatConstants.FONT_STYLE_ITALIC.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: normal; \n"); // depends on control dependency: [if], data = [none] css.append("font-style: italic; \n"); // depends on control dependency: [if], data = [none] } if 
(StyleFormatConstants.FONT_STYLE_BOLDITALIC.equals(style.get(StyleFormatConstants.FONT_STYLE_KEY))) { css.append("font-weight: bold; \n"); // depends on control dependency: [if], data = [none] css.append("font-style: italic; \n"); // depends on control dependency: [if], data = [none] } } if (style.containsKey(StyleFormatConstants.BACKGROUND_COLOR)) { Color val = (Color) style.get(StyleFormatConstants.BACKGROUND_COLOR); if ((val.getRed() != 255) || (val.getGreen() != 255) || (val.getBlue() != 255)) { css.append("background-color: rgb(").append(val.getRed()).append(",").append(val.getGreen()).append(",") .append(val.getBlue()).append(") ;\n"); // depends on control dependency: [if], data = [none] } } if (style.containsKey(StyleFormatConstants.HORIZONTAL_ALIGN_KEY)) { if (StyleFormatConstants.HORIZONTAL_ALIGN_LEFT.equals(style.get(StyleFormatConstants.HORIZONTAL_ALIGN_KEY))) { css.append("text-align:left; \n"); // depends on control dependency: [if], data = [none] } if (StyleFormatConstants.HORIZONTAL_ALIGN_RIGHT.equals(style.get(StyleFormatConstants.HORIZONTAL_ALIGN_KEY))) { css.append("text-align:right; \n"); // depends on control dependency: [if], data = [none] } if (StyleFormatConstants.HORIZONTAL_ALIGN_CENTER.equals(style.get(StyleFormatConstants.HORIZONTAL_ALIGN_KEY))) { css.append("text-align:center; \n"); // depends on control dependency: [if], data = [none] } } if (style.containsKey(StyleFormatConstants.VERTICAL_ALIGN_KEY)) { if (StyleFormatConstants.VERTICAL_ALIGN_MIDDLE.equals(style.get(StyleFormatConstants.VERTICAL_ALIGN_KEY))) { css.append("vertical-align:middle; \n"); // depends on control dependency: [if], data = [none] } if (StyleFormatConstants.VERTICAL_ALIGN_TOP.equals(style.get(StyleFormatConstants.VERTICAL_ALIGN_KEY))) { css.append("vertical-align:top; \n"); // depends on control dependency: [if], data = [none] } if (StyleFormatConstants.VERTICAL_ALIGN_BOTTOM.equals(style.get(StyleFormatConstants.VERTICAL_ALIGN_KEY))) { 
css.append("vertical-align:bottom; \n"); // depends on control dependency: [if], data = [none] } } if (style.containsKey(StyleFormatConstants.PADDING_LEFT)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_LEFT); css.append("padding-left:").append(val).append("pt; \n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.PADDING_RIGHT)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_RIGHT); css.append("padding-right:").append(val).append("pt; \n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.PADDING_TOP)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_TOP); css.append("padding-top:").append(val).append("pt; \n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.PADDING_BOTTOM)) { Float val = (Float) style.get(StyleFormatConstants.PADDING_BOTTOM); css.append("padding-bottom:").append(val).append("px; \n"); // depends on control dependency: [if], data = [none] } if (needed) { if (style.containsKey(StyleFormatConstants.BORDER_LEFT)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_LEFT); css.append("border-left:").append(val.intValue()).append("px; \n"); // depends on control dependency: [if], data = [none] css.append("border-left-style:solid; \n"); // depends on control dependency: [if], data = [none] Color color = (Color) style.get(StyleFormatConstants.BORDER_LEFT_COLOR); css.append("border-left-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { css.append("border-left: none; \n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.BORDER_RIGHT)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_RIGHT); 
css.append("border-right:").append(val.intValue()).append("px; \n"); // depends on control dependency: [if], data = [none] css.append("border-right-style:solid; \n"); // depends on control dependency: [if], data = [none] Color color = (Color) style.get(StyleFormatConstants.BORDER_RIGHT_COLOR); css.append("border-right-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { css.append("border-right: none; \n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.BORDER_TOP)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_TOP); css.append("border-top:").append(val.intValue()).append("px; \n"); // depends on control dependency: [if], data = [none] css.append("border-top-style:solid; \n"); // depends on control dependency: [if], data = [none] Color color = (Color) style.get(StyleFormatConstants.BORDER_TOP_COLOR); css.append("border-top-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { css.append("border-top: none; \n"); // depends on control dependency: [if], data = [none] } if (style.containsKey(StyleFormatConstants.BORDER_BOTTOM)) { Float val = (Float) style.get(StyleFormatConstants.BORDER_BOTTOM); css.append("border-bottom:").append(val.intValue()).append("px; \n"); // depends on control dependency: [if], data = [none] css.append("border-bottom-style:solid; \n"); // depends on control dependency: [if], data = [none] Color color = (Color) style.get(StyleFormatConstants.BORDER_BOTTOM_COLOR); css.append("border-bottom-color: ").append(Integer.toHexString(color.getRGB() & 0x00ffffff)).append(" ;\n"); // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] } else { 
css.append("border-bottom: none; \n"); // depends on control dependency: [if], data = [none] } } if (be != null) { if (!be.isWrapText()) { css.append("white-space: nowrap; \n"); // depends on control dependency: [if], data = [none] } else { css.append("word-wrap: break-word; \n"); // depends on control dependency: [if], data = [none] css.append("line-height: " + be.getPercentLineSpacing() + "%; \n"); // depends on control dependency: [if], data = [none] } // if (be.getTextRotation() != 0) { // css.append(getRotationStyle(be.getTextRotation())); // } } return css.toString(); } }
public class class_name { private void storeStaticField(XField staticField, Instruction obj, boolean pushStoredValue) { if (RLE_DEBUG) { System.out.println("[storeStaticField for field " + staticField + " in instruction " + handle); } ValueNumberFrame frame = getFrame(); AvailableLoad availableLoad = new AvailableLoad(staticField); int numWordsConsumed = getNumWordsConsumed(obj); ValueNumber[] inputValueList = popInputValues(numWordsConsumed); if (pushStoredValue) { pushOutputValues(inputValueList); } // Kill loads of this field frame.killLoadsOfField(staticField); // Make load available frame.addAvailableLoad(availableLoad, inputValueList); if (RLE_DEBUG) { System.out.println("[making store of " + staticField + " available]"); } if (VERIFY_INTEGRITY) { checkConsumedAndProducedValues(obj, inputValueList, pushStoredValue ? inputValueList : EMPTY_INPUT_VALUE_LIST); } } }
public class class_name { private void storeStaticField(XField staticField, Instruction obj, boolean pushStoredValue) { if (RLE_DEBUG) { System.out.println("[storeStaticField for field " + staticField + " in instruction " + handle); // depends on control dependency: [if], data = [none] } ValueNumberFrame frame = getFrame(); AvailableLoad availableLoad = new AvailableLoad(staticField); int numWordsConsumed = getNumWordsConsumed(obj); ValueNumber[] inputValueList = popInputValues(numWordsConsumed); if (pushStoredValue) { pushOutputValues(inputValueList); // depends on control dependency: [if], data = [none] } // Kill loads of this field frame.killLoadsOfField(staticField); // Make load available frame.addAvailableLoad(availableLoad, inputValueList); if (RLE_DEBUG) { System.out.println("[making store of " + staticField + " available]"); // depends on control dependency: [if], data = [none] } if (VERIFY_INTEGRITY) { checkConsumedAndProducedValues(obj, inputValueList, pushStoredValue ? inputValueList : EMPTY_INPUT_VALUE_LIST); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public int countByG_U_D(long groupId, long userId, boolean defaultWishList) { FinderPath finderPath = FINDER_PATH_COUNT_BY_G_U_D; Object[] finderArgs = new Object[] { groupId, userId, defaultWishList }; Long count = (Long)finderCache.getResult(finderPath, finderArgs, this); if (count == null) { StringBundler query = new StringBundler(4); query.append(_SQL_COUNT_COMMERCEWISHLIST_WHERE); query.append(_FINDER_COLUMN_G_U_D_GROUPID_2); query.append(_FINDER_COLUMN_G_U_D_USERID_2); query.append(_FINDER_COLUMN_G_U_D_DEFAULTWISHLIST_2); String sql = query.toString(); Session session = null; try { session = openSession(); Query q = session.createQuery(sql); QueryPos qPos = QueryPos.getInstance(q); qPos.add(groupId); qPos.add(userId); qPos.add(defaultWishList); count = (Long)q.uniqueResult(); finderCache.putResult(finderPath, finderArgs, count); } catch (Exception e) { finderCache.removeResult(finderPath, finderArgs); throw processException(e); } finally { closeSession(session); } } return count.intValue(); } }
public class class_name { @Override public int countByG_U_D(long groupId, long userId, boolean defaultWishList) { FinderPath finderPath = FINDER_PATH_COUNT_BY_G_U_D; Object[] finderArgs = new Object[] { groupId, userId, defaultWishList }; Long count = (Long)finderCache.getResult(finderPath, finderArgs, this); if (count == null) { StringBundler query = new StringBundler(4); query.append(_SQL_COUNT_COMMERCEWISHLIST_WHERE); // depends on control dependency: [if], data = [none] query.append(_FINDER_COLUMN_G_U_D_GROUPID_2); // depends on control dependency: [if], data = [none] query.append(_FINDER_COLUMN_G_U_D_USERID_2); // depends on control dependency: [if], data = [none] query.append(_FINDER_COLUMN_G_U_D_DEFAULTWISHLIST_2); // depends on control dependency: [if], data = [none] String sql = query.toString(); Session session = null; try { session = openSession(); // depends on control dependency: [try], data = [none] Query q = session.createQuery(sql); QueryPos qPos = QueryPos.getInstance(q); qPos.add(groupId); // depends on control dependency: [try], data = [none] qPos.add(userId); // depends on control dependency: [try], data = [none] qPos.add(defaultWishList); // depends on control dependency: [try], data = [none] count = (Long)q.uniqueResult(); // depends on control dependency: [try], data = [none] finderCache.putResult(finderPath, finderArgs, count); // depends on control dependency: [try], data = [none] } catch (Exception e) { finderCache.removeResult(finderPath, finderArgs); throw processException(e); } // depends on control dependency: [catch], data = [none] finally { closeSession(session); } } return count.intValue(); } }
public class class_name { private void computeNeighborhoods(Relation<O> relation, DataStore<SetDBIDs> knns, ModifiableDBIDs pruned, WritableDataStore<ModifiableDBIDs> rNNminuskNNs) { FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Finding RkNN", relation.size(), LOG) : null; for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) { DBIDs knn = knns.get(iter); int count = 1; // The point itself. for(DBIDIter niter = knn.iter(); niter.valid(); niter.advance()) { // Ignore the query point itself. if(DBIDUtil.equal(iter, niter)) { continue; } // As we did not initialize count with the rNN size, we check all // neighbors here. if(knns.get(niter).contains(iter)) { count++; } else { // In contrast to INFLO pseudocode, we only update if it is not found, // i.e., if it is in RkNN \setminus kNN, to save memory. rNNminuskNNs.get(niter).add(iter); } } // INFLO pruning rule if(count >= knn.size() * m) { pruned.add(iter); } LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); } }
public class class_name { private void computeNeighborhoods(Relation<O> relation, DataStore<SetDBIDs> knns, ModifiableDBIDs pruned, WritableDataStore<ModifiableDBIDs> rNNminuskNNs) { FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Finding RkNN", relation.size(), LOG) : null; for(DBIDIter iter = relation.iterDBIDs(); iter.valid(); iter.advance()) { DBIDs knn = knns.get(iter); int count = 1; // The point itself. for(DBIDIter niter = knn.iter(); niter.valid(); niter.advance()) { // Ignore the query point itself. if(DBIDUtil.equal(iter, niter)) { continue; } // As we did not initialize count with the rNN size, we check all // neighbors here. if(knns.get(niter).contains(iter)) { count++; // depends on control dependency: [if], data = [none] } else { // In contrast to INFLO pseudocode, we only update if it is not found, // i.e., if it is in RkNN \setminus kNN, to save memory. rNNminuskNNs.get(niter).add(iter); // depends on control dependency: [if], data = [none] } } // INFLO pruning rule if(count >= knn.size() * m) { pruned.add(iter); // depends on control dependency: [if], data = [none] } LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none] } LOG.ensureCompleted(prog); } }
public class class_name { public List<ModelServiceInstance> getUPModelInstancesByMetadataKey(String keyName) { List<ModelServiceInstance> list = new ArrayList<>(); for (ModelServiceInstance instance : getModelInstancesByMetadataKey(keyName)) { if (instance.getStatus().equals(OperationalStatus.UP)) { list.add(instance); } } return list; } }
public class class_name { public List<ModelServiceInstance> getUPModelInstancesByMetadataKey(String keyName) { List<ModelServiceInstance> list = new ArrayList<>(); for (ModelServiceInstance instance : getModelInstancesByMetadataKey(keyName)) { if (instance.getStatus().equals(OperationalStatus.UP)) { list.add(instance); // depends on control dependency: [if], data = [none] } } return list; } }
public class class_name { private void writeTaskExtendedAttributes(Project.Tasks.Task xml, Task mpx) { Project.Tasks.Task.ExtendedAttribute attrib; List<Project.Tasks.Task.ExtendedAttribute> extendedAttributes = xml.getExtendedAttribute(); for (TaskField mpxFieldID : getAllTaskExtendedAttributes()) { Object value = mpx.getCachedValue(mpxFieldID); if (FieldTypeHelper.valueIsNotDefault(mpxFieldID, value)) { m_extendedAttributesInUse.add(mpxFieldID); Integer xmlFieldID = Integer.valueOf(MPPTaskField.getID(mpxFieldID) | MPPTaskField.TASK_FIELD_BASE); attrib = m_factory.createProjectTasksTaskExtendedAttribute(); extendedAttributes.add(attrib); attrib.setFieldID(xmlFieldID.toString()); attrib.setValue(DatatypeConverter.printExtendedAttribute(this, value, mpxFieldID.getDataType())); attrib.setDurationFormat(printExtendedAttributeDurationFormat(value)); } } } }
public class class_name { private void writeTaskExtendedAttributes(Project.Tasks.Task xml, Task mpx) { Project.Tasks.Task.ExtendedAttribute attrib; List<Project.Tasks.Task.ExtendedAttribute> extendedAttributes = xml.getExtendedAttribute(); for (TaskField mpxFieldID : getAllTaskExtendedAttributes()) { Object value = mpx.getCachedValue(mpxFieldID); if (FieldTypeHelper.valueIsNotDefault(mpxFieldID, value)) { m_extendedAttributesInUse.add(mpxFieldID); // depends on control dependency: [if], data = [none] Integer xmlFieldID = Integer.valueOf(MPPTaskField.getID(mpxFieldID) | MPPTaskField.TASK_FIELD_BASE); attrib = m_factory.createProjectTasksTaskExtendedAttribute(); // depends on control dependency: [if], data = [none] extendedAttributes.add(attrib); // depends on control dependency: [if], data = [none] attrib.setFieldID(xmlFieldID.toString()); // depends on control dependency: [if], data = [none] attrib.setValue(DatatypeConverter.printExtendedAttribute(this, value, mpxFieldID.getDataType())); // depends on control dependency: [if], data = [none] attrib.setDurationFormat(printExtendedAttributeDurationFormat(value)); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public T text(int resid, Object... formatArgs) { if (context != null) { CharSequence text = context.getString(resid, formatArgs); text(text); } return self(); } }
public class class_name { public T text(int resid, Object... formatArgs) { if (context != null) { CharSequence text = context.getString(resid, formatArgs); text(text); // depends on control dependency: [if], data = [none] } return self(); } }
public class class_name { protected void removePersistedSession(String id) { if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[REMOVE_PERSISTED_SESSION], id); } Connection con; PreparedStatement ps = null; boolean psClose = false; //If the app calls invalidate, it may not be removed from the local cache yet. superRemove(id); con = getConnection(false); if (con == null) { return; } try { if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "before upd " + id); } ps = con.prepareStatement(delOne); ps.setString(1, id); ps.setString(2, _iStore.getId()); ps.executeUpdate(); ps.close(); psClose = true; if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "after upd " + id); } addToRecentlyInvalidatedList(id); } catch (SQLException se) { // if (isStaleConnectionException(se)) { // com.ibm.ws.ffdc.FFDCFilter.processException(se, "com.ibm.ws.session.store.db.DatabaseHashMap.removePersistedSession", "619", id); // } else { com.ibm.ws.ffdc.FFDCFilter.processException(se, "com.ibm.ws.session.store.db.DatabaseHashMap.removePersistedSession", "621", id); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "DatabaseHashMap.removeSessionsError"); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "CommonMessage.exception", se); // } } catch (Exception e) { com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.session.store.db.DatabaseHashMap.removePersistedSession", "626", id); 
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "DatabaseHashMap.removeSessionsError"); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "CommonMessage.exception", e); } finally { if (!psClose && ps != null) closeStatement(ps); closeConnection(con); } } }
public class class_name { protected void removePersistedSession(String id) { if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[REMOVE_PERSISTED_SESSION], id); // depends on control dependency: [if], data = [none] } Connection con; PreparedStatement ps = null; boolean psClose = false; //If the app calls invalidate, it may not be removed from the local cache yet. superRemove(id); con = getConnection(false); if (con == null) { return; // depends on control dependency: [if], data = [none] } try { if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "before upd " + id); // depends on control dependency: [if], data = [none] } ps = con.prepareStatement(delOne); // depends on control dependency: [try], data = [none] ps.setString(1, id); // depends on control dependency: [try], data = [none] ps.setString(2, _iStore.getId()); // depends on control dependency: [try], data = [none] ps.executeUpdate(); // depends on control dependency: [try], data = [none] ps.close(); // depends on control dependency: [try], data = [none] psClose = true; // depends on control dependency: [try], data = [none] if (com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "after upd " + id); // depends on control dependency: [if], data = [none] } addToRecentlyInvalidatedList(id); // depends on control dependency: [try], data = [none] } catch (SQLException se) { // if (isStaleConnectionException(se)) { // com.ibm.ws.ffdc.FFDCFilter.processException(se, "com.ibm.ws.session.store.db.DatabaseHashMap.removePersistedSession", 
"619", id); // } else { com.ibm.ws.ffdc.FFDCFilter.processException(se, "com.ibm.ws.session.store.db.DatabaseHashMap.removePersistedSession", "621", id); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "DatabaseHashMap.removeSessionsError"); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "CommonMessage.exception", se); // } } catch (Exception e) { // depends on control dependency: [catch], data = [none] com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.session.store.db.DatabaseHashMap.removePersistedSession", "626", id); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "DatabaseHashMap.removeSessionsError"); LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[REMOVE_PERSISTED_SESSION], "CommonMessage.exception", e); } finally { // depends on control dependency: [catch], data = [none] if (!psClose && ps != null) closeStatement(ps); closeConnection(con); } } }
public class class_name { public EEnum getLineDataObjectPositionMigrationTempOrient() { if (lineDataObjectPositionMigrationTempOrientEEnum == null) { lineDataObjectPositionMigrationTempOrientEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(172); } return lineDataObjectPositionMigrationTempOrientEEnum; } }
public class class_name { public EEnum getLineDataObjectPositionMigrationTempOrient() { if (lineDataObjectPositionMigrationTempOrientEEnum == null) { lineDataObjectPositionMigrationTempOrientEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(172); // depends on control dependency: [if], data = [none] } return lineDataObjectPositionMigrationTempOrientEEnum; } }
public class class_name { public ListPoliciesGrantingServiceAccessRequest withServiceNamespaces(String... serviceNamespaces) { if (this.serviceNamespaces == null) { setServiceNamespaces(new com.amazonaws.internal.SdkInternalList<String>(serviceNamespaces.length)); } for (String ele : serviceNamespaces) { this.serviceNamespaces.add(ele); } return this; } }
public class class_name { public ListPoliciesGrantingServiceAccessRequest withServiceNamespaces(String... serviceNamespaces) { if (this.serviceNamespaces == null) { setServiceNamespaces(new com.amazonaws.internal.SdkInternalList<String>(serviceNamespaces.length)); // depends on control dependency: [if], data = [none] } for (String ele : serviceNamespaces) { this.serviceNamespaces.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public long getIdleStartMilliseconds() { if (isIdle()) return Math.max(creationTime, owner.getConnectTime()); else { return Math.max(startTime + Math.max(0, executableEstimatedDuration), System.currentTimeMillis() + 15000); } } }
public class class_name { public long getIdleStartMilliseconds() { if (isIdle()) return Math.max(creationTime, owner.getConnectTime()); else { return Math.max(startTime + Math.max(0, executableEstimatedDuration), System.currentTimeMillis() + 15000); // depends on control dependency: [if], data = [none] } } }
public class class_name { synchronized void setPrivateRoot(final Stoppable privateRoot) { if (privateRoot != null && this.privateRoot != null) { privateRoot.stop(); throw new IllegalStateException("Private root already exists."); } this.privateRoot = privateRoot; } }
public class class_name { synchronized void setPrivateRoot(final Stoppable privateRoot) { if (privateRoot != null && this.privateRoot != null) { privateRoot.stop(); // depends on control dependency: [if], data = [none] throw new IllegalStateException("Private root already exists."); } this.privateRoot = privateRoot; } }
public class class_name { @Override protected boolean prepare(final Context2D context, final Attributes attr, final double alpha) { final double w = attr.getWidth(); final double h = attr.getHeight(); if ((w > 0) && (h > 0)) { context.beginPath(); context.ellipse(0, 0, w / 2, h / 2, 0, 0, Math.PI * 2, true); context.closePath(); return true; } return false; } }
public class class_name { @Override protected boolean prepare(final Context2D context, final Attributes attr, final double alpha) { final double w = attr.getWidth(); final double h = attr.getHeight(); if ((w > 0) && (h > 0)) { context.beginPath(); // depends on control dependency: [if], data = [none] context.ellipse(0, 0, w / 2, h / 2, 0, 0, Math.PI * 2, true); // depends on control dependency: [if], data = [none] context.closePath(); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { private void drawPixels(Color color, long fileOffset, long length, int additionalGap) { assert color != null; long pixelStart = getPixelNumber(fileOffset); // necessary to avoid gaps due to rounding issues (you can't just do // getPixelNumber(fileLength)) long pixelLength = getPixelNumber(fileOffset + length) - pixelStart; long pixelMax = getXPixels() * getYPixels(); long pixelEnd = pixelStart + pixelLength; if (pixelStart > pixelMax) { logger.warn("too many pixels, max is: " + pixelMax + " and trying to set: " + pixelStart); } else { if (pixelEnd > pixelMax) { logger.warn("too many pixels, max is: " + pixelMax + " and trying to set: " + pixelEnd); pixelEnd = pixelMax; } for (long i = pixelStart; i < pixelEnd; i++) { int x = (int) ((i % getXPixels()) * pixelSize); int y = (int) ((i / getXPixels()) * pixelSize); int gap = pixelated ? additionalGap + 1 : additionalGap; int sizemodifier = pixelated ? 2 : 1; drawRect(color, x + gap, y + gap, pixelSize - gap * sizemodifier, pixelSize - gap * sizemodifier); } } // Graphics g = image.getGraphics(); // g.drawString(new Long(fileOffset).toString(), (pixelStart % xPixels) // * pixelSize,(pixelStart / xPixels) * pixelSize ); } }
public class class_name { private void drawPixels(Color color, long fileOffset, long length, int additionalGap) { assert color != null; long pixelStart = getPixelNumber(fileOffset); // necessary to avoid gaps due to rounding issues (you can't just do // getPixelNumber(fileLength)) long pixelLength = getPixelNumber(fileOffset + length) - pixelStart; long pixelMax = getXPixels() * getYPixels(); long pixelEnd = pixelStart + pixelLength; if (pixelStart > pixelMax) { logger.warn("too many pixels, max is: " + pixelMax + " and trying to set: " + pixelStart); // depends on control dependency: [if], data = [none] } else { if (pixelEnd > pixelMax) { logger.warn("too many pixels, max is: " + pixelMax + " and trying to set: " + pixelEnd); // depends on control dependency: [if], data = [none] pixelEnd = pixelMax; // depends on control dependency: [if], data = [none] } for (long i = pixelStart; i < pixelEnd; i++) { int x = (int) ((i % getXPixels()) * pixelSize); int y = (int) ((i / getXPixels()) * pixelSize); int gap = pixelated ? additionalGap + 1 : additionalGap; int sizemodifier = pixelated ? 2 : 1; drawRect(color, x + gap, y + gap, pixelSize - gap * sizemodifier, pixelSize - gap * sizemodifier); // depends on control dependency: [for], data = [none] } } // Graphics g = image.getGraphics(); // g.drawString(new Long(fileOffset).toString(), (pixelStart % xPixels) // * pixelSize,(pixelStart / xPixels) * pixelSize ); } }
public class class_name { public CORSHandlerBuilder withAllowedMethods(Method... methods) { if (methods == null) { allowedMethods = null; } else { allowedMethods = new HashSet<>(asList(methods)); } return this; } }
public class class_name { public CORSHandlerBuilder withAllowedMethods(Method... methods) { if (methods == null) { allowedMethods = null; // depends on control dependency: [if], data = [none] } else { allowedMethods = new HashSet<>(asList(methods)); // depends on control dependency: [if], data = [(methods] } return this; } }
public class class_name { public void marshall(DeleteTaskRequest deleteTaskRequest, ProtocolMarshaller protocolMarshaller) { if (deleteTaskRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteTaskRequest.getTaskArn(), TASKARN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DeleteTaskRequest deleteTaskRequest, ProtocolMarshaller protocolMarshaller) { if (deleteTaskRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteTaskRequest.getTaskArn(), TASKARN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static List<String> listAllMime( String directory , String type ) { List<String> ret = new ArrayList<>(); try { // see if it's a URL or not URL url = new URL(directory); if( url.getProtocol().equals("file") ) { directory = url.getFile(); } else if( url.getProtocol().equals("jar") ) { return listJarMime(url,null,null); } else { throw new RuntimeException("Not sure what to do with this url. "+url.toString()); } } catch (MalformedURLException ignore) { } File d = new File(directory); if( !d.isDirectory() ) throw new IllegalArgumentException("Must specify an directory"); File []files = d.listFiles(); if( files == null ) return ret; for( File f : files ) { if( f.isDirectory() ) continue; try { String mimeType = Files.probeContentType(f.toPath()); if( mimeType.contains(type)) ret.add(f.getAbsolutePath()); } catch (IOException ignore) {} } Collections.sort(ret); return ret; } }
public class class_name { public static List<String> listAllMime( String directory , String type ) { List<String> ret = new ArrayList<>(); try { // see if it's a URL or not URL url = new URL(directory); if( url.getProtocol().equals("file") ) { directory = url.getFile(); // depends on control dependency: [if], data = [none] } else if( url.getProtocol().equals("jar") ) { return listJarMime(url,null,null); // depends on control dependency: [if], data = [none] } else { throw new RuntimeException("Not sure what to do with this url. "+url.toString()); } } catch (MalformedURLException ignore) { } // depends on control dependency: [catch], data = [none] File d = new File(directory); if( !d.isDirectory() ) throw new IllegalArgumentException("Must specify an directory"); File []files = d.listFiles(); if( files == null ) return ret; for( File f : files ) { if( f.isDirectory() ) continue; try { String mimeType = Files.probeContentType(f.toPath()); if( mimeType.contains(type)) ret.add(f.getAbsolutePath()); } catch (IOException ignore) {} // depends on control dependency: [catch], data = [none] } Collections.sort(ret); return ret; } }
public class class_name { protected static void writeInt(ChannelBuffer buf, int value) { if (value < 10) { buf.writeByte('0' + value); return; } StringBuilder sb = new StringBuilder(8); while (value > 0) { int digit = value % 10; sb.append((char) ('0' + digit)); value /= 10; } for (int i = sb.length() - 1; i >= 0; i--) { buf.writeByte(sb.charAt(i)); } } }
public class class_name { protected static void writeInt(ChannelBuffer buf, int value) { if (value < 10) { buf.writeByte('0' + value); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } StringBuilder sb = new StringBuilder(8); while (value > 0) { int digit = value % 10; sb.append((char) ('0' + digit)); // depends on control dependency: [while], data = [none] value /= 10; // depends on control dependency: [while], data = [none] } for (int i = sb.length() - 1; i >= 0; i--) { buf.writeByte(sb.charAt(i)); // depends on control dependency: [for], data = [i] } } }
public class class_name { public NodeIterator getNodes(String namePattern) throws RepositoryException { long start = 0; if (LOG.isDebugEnabled()) { start = System.currentTimeMillis(); LOG.debug("getNodes(String) >>>>>"); } checkValid(); try { NodeNamePatternFilter filter = new NodeNamePatternFilter(namePattern, session); List<NodeData> childs = null; if (filter.isLookingAllData()) { childs = childNodesData(); } else { childs = new ArrayList<NodeData>(dataManager.getChildNodesData(nodeData(), filter.getQPathEntryFilters())); Collections.sort(childs, new NodeDataOrderComparator()); } if (childs.size() < session.getLazyReadThreshold()) { // full iterator List<NodeImpl> nodes = new ArrayList<NodeImpl>(); for (int i = 0, length = childs.size(); i < length; i++) { NodeData child = childs.get(i); if (filter.accept(child) && session.getAccessManager().hasPermission(child.getACL(), new String[]{PermissionType.READ}, session.getUserState().getIdentity())) { NodeImpl item = (NodeImpl)dataManager.readItem(child, nodeData(), true, false); session.getActionHandler().postRead(item); nodes.add(item); } } return new EntityCollection(nodes); } else { // lazy iterator return new LazyNodeIterator(childs, filter); } } finally { if (LOG.isDebugEnabled()) { LOG.debug("getNodes(String) <<<<< " + ((System.currentTimeMillis() - start) / 1000d) + "sec"); } } } }
public class class_name { public NodeIterator getNodes(String namePattern) throws RepositoryException { long start = 0; if (LOG.isDebugEnabled()) { start = System.currentTimeMillis(); LOG.debug("getNodes(String) >>>>>"); } checkValid(); try { NodeNamePatternFilter filter = new NodeNamePatternFilter(namePattern, session); List<NodeData> childs = null; if (filter.isLookingAllData()) { childs = childNodesData(); // depends on control dependency: [if], data = [none] } else { childs = new ArrayList<NodeData>(dataManager.getChildNodesData(nodeData(), filter.getQPathEntryFilters())); // depends on control dependency: [if], data = [none] Collections.sort(childs, new NodeDataOrderComparator()); // depends on control dependency: [if], data = [none] } if (childs.size() < session.getLazyReadThreshold()) { // full iterator List<NodeImpl> nodes = new ArrayList<NodeImpl>(); for (int i = 0, length = childs.size(); i < length; i++) { NodeData child = childs.get(i); if (filter.accept(child) && session.getAccessManager().hasPermission(child.getACL(), new String[]{PermissionType.READ}, session.getUserState().getIdentity())) { NodeImpl item = (NodeImpl)dataManager.readItem(child, nodeData(), true, false); session.getActionHandler().postRead(item); // depends on control dependency: [if], data = [none] nodes.add(item); // depends on control dependency: [if], data = [none] } } return new EntityCollection(nodes); // depends on control dependency: [if], data = [none] } else { // lazy iterator return new LazyNodeIterator(childs, filter); // depends on control dependency: [if], data = [none] } } finally { if (LOG.isDebugEnabled()) { LOG.debug("getNodes(String) <<<<< " + ((System.currentTimeMillis() - start) / 1000d) + "sec"); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public EmailAddress parseToEmailAddress(final String email) { final ParsedAddress parsedAddress = parse(email); if (!parsedAddress.isValid()) { return null; } return new EmailAddress(parsedAddress.getPersonalName(), parsedAddress.getLocalPart() + '@' + parsedAddress.getDomain()); } }
public class class_name { public EmailAddress parseToEmailAddress(final String email) { final ParsedAddress parsedAddress = parse(email); if (!parsedAddress.isValid()) { return null; // depends on control dependency: [if], data = [none] } return new EmailAddress(parsedAddress.getPersonalName(), parsedAddress.getLocalPart() + '@' + parsedAddress.getDomain()); } }
public class class_name { public List<ResourceGrant> releaseResource(List<Integer> idList) { if (deleted) { throw new RuntimeException("Session: " + sessionId + " has been deleted"); } List<ResourceGrant> canceledGrants = new ArrayList<ResourceGrant>(); for (Integer id : idList) { ResourceRequestInfo req = idToRequest.get(id); if (req != null) { idToRequest.remove(id); ResourceGrant grant = idToGrant.remove(id); if (grant != null) { // we have previously granted this resource, return to caller canceledGrants.add(grant); removeGrantedRequest(req, false); } else { removePendingRequest(req); } incrementRequestCount(req.getType(), -1); } } return canceledGrants; } }
public class class_name { public List<ResourceGrant> releaseResource(List<Integer> idList) { if (deleted) { throw new RuntimeException("Session: " + sessionId + " has been deleted"); } List<ResourceGrant> canceledGrants = new ArrayList<ResourceGrant>(); for (Integer id : idList) { ResourceRequestInfo req = idToRequest.get(id); if (req != null) { idToRequest.remove(id); // depends on control dependency: [if], data = [none] ResourceGrant grant = idToGrant.remove(id); if (grant != null) { // we have previously granted this resource, return to caller canceledGrants.add(grant); // depends on control dependency: [if], data = [(grant] removeGrantedRequest(req, false); // depends on control dependency: [if], data = [none] } else { removePendingRequest(req); // depends on control dependency: [if], data = [none] } incrementRequestCount(req.getType(), -1); // depends on control dependency: [if], data = [(req] } } return canceledGrants; } }
public class class_name { public FSTObjectInput getObjectInput( byte arr[], int len ) { FSTObjectInput fstObjectInput = getIn(); try { fstObjectInput.resetForReuseUseArray(arr,len); return fstObjectInput; } catch (IOException e) { FSTUtil.<RuntimeException>rethrow(e); } return null; } }
public class class_name {
    /**
     * Returns a reusable FSTObjectInput reset to read from the given array.
     *
     * @param arr backing byte array to read from
     * @param len number of valid bytes in {@code arr}
     * @return the reset input stream
     */
    public FSTObjectInput getObjectInput( byte arr[], int len ) {
        FSTObjectInput fstObjectInput = getIn();
        try {
            fstObjectInput.resetForReuseUseArray(arr,len); // depends on control dependency: [try], data = [none]
            return fstObjectInput; // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            FSTUtil.<RuntimeException>rethrow(e);
        } // depends on control dependency: [catch], data = [none]
        // Unreachable in practice: rethrow always throws; satisfies the compiler.
        return null;
    }
}
public class class_name {
    /**
     * Lazily resolves and caches the EClass for IfcTextDecoration from the
     * registered Ifc2x3tc1 EMF package.
     *
     * @return the cached IfcTextDecoration EClass, resolving it on first use
     */
    public EClass getIfcTextDecoration() {
        if (ifcTextDecorationEClass != null) {
            return ifcTextDecorationEClass;
        }
        // Classifier index 746 in the Ifc2x3tc1 package (generated constant).
        ifcTextDecorationEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc2x3tc1Package.eNS_URI)
                .getEClassifiers().get(746);
        return ifcTextDecorationEClass;
    }
}
public class class_name {
    /**
     * Lazily resolves and caches the EClass for IfcTextDecoration from the
     * registered Ifc2x3tc1 EMF package.
     *
     * @return the cached IfcTextDecoration EClass, resolving it on first use
     */
    public EClass getIfcTextDecoration() {
        if (ifcTextDecorationEClass == null) {
            // Classifier index 746 in the Ifc2x3tc1 package (generated constant).
            ifcTextDecorationEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI)
                    .getEClassifiers().get(746); // depends on control dependency: [if], data = [none]
        }
        return ifcTextDecorationEClass;
    }
}
public class class_name {
    /**
     * Looks up the string property for the given key and converts it to a URI.
     *
     * @param key the configuration key to look up
     * @return the parsed URI, or {@code Optional.empty()} when the key has no value
     */
    public Optional<URI> getURI(String key) {
        final Optional<String> raw = getString(key);
        return raw.isPresent()
                ? Optional.of(URIUtils.create(raw.get()))
                : Optional.empty();
    }
}
public class class_name {
    /**
     * Looks up the string property for the given key and converts it to a URI.
     *
     * @param key the configuration key to look up
     * @return the parsed URI, or {@code Optional.empty()} when the key has no value
     */
    public Optional<URI> getURI(String key) {
        Optional<String> property = getString(key);
        if (property.isPresent()) {
            return Optional.of(URIUtils.create(property.get())); // depends on control dependency: [if], data = [none]
        } else {
            return Optional.empty(); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Returns the login configuration of the given web metadata, creating an
     * empty one and attaching it when none exists yet.
     *
     * @param jbossWebMD the web metadata to read (and possibly mutate)
     * @return the existing or newly attached login config; never {@code null}
     */
    public static LoginConfigMetaData getLoginConfig(final JBossWebMetaData jbossWebMD) {
        final LoginConfigMetaData existing = jbossWebMD.getLoginConfig();
        if (existing != null) {
            return existing;
        }
        final LoginConfigMetaData created = new LoginConfigMetaData();
        jbossWebMD.setLoginConfig(created);
        return created;
    }
}
public class class_name {
    /**
     * Returns the login configuration of the given web metadata, creating an
     * empty one and attaching it when none exists yet.
     *
     * @param jbossWebMD the web metadata to read (and possibly mutate)
     * @return the existing or newly attached login config; never {@code null}
     */
    public static LoginConfigMetaData getLoginConfig(final JBossWebMetaData jbossWebMD) {
        LoginConfigMetaData loginConfigMD = jbossWebMD.getLoginConfig();
        if (loginConfigMD == null) {
            loginConfigMD = new LoginConfigMetaData(); // depends on control dependency: [if], data = [none]
            jbossWebMD.setLoginConfig(loginConfigMD); // depends on control dependency: [if], data = [(loginConfigMD]
        }
        return loginConfigMD;
    }
}
public class class_name {
    /**
     * Returns the largest value contained in the given array.
     *
     * @param longArray the values to scan; must contain at least one element
     * @return the maximum value in the array
     * @throws IllegalArgumentException if the array is empty
     */
    public static long searchMax(long[] longArray) {
        if (longArray.length == 0) {
            throw new IllegalArgumentException("The array you provided does not have any elements");
        }
        long best = longArray[0];
        for (long candidate : longArray) {
            if (candidate > best) {
                best = candidate;
            }
        }
        return best;
    }
}
public class class_name {
    /**
     * Returns the largest value contained in the given array.
     *
     * @param longArray the values to scan; must contain at least one element
     * @return the maximum value in the array
     * @throws IllegalArgumentException if the array is empty
     */
    public static long searchMax(long[] longArray) {
        if(longArray.length == 0) {
            throw new IllegalArgumentException("The array you provided does not have any elements");
        }
        long max = longArray[0];
        // Scan the remaining elements, keeping the running maximum.
        for(int i = 1; i < longArray.length; i++) {
            if(longArray[i] > max) {
                max = longArray[i]; // depends on control dependency: [if], data = [none]
            }
        }
        return max;
    }
}
public class class_name { public static boolean isLayoutRtl(View view) { if (Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1) { return view.getLayoutDirection() == View.LAYOUT_DIRECTION_RTL; } else { // All layouts are LTR before JB MR1. return false; } } }
public class class_name {
    /**
     * Tells whether the view's resolved layout direction is right-to-left.
     * Always {@code false} before API 17 (JELLY_BEAN_MR1), where all layouts
     * are LTR.
     *
     * @param view the view whose layout direction is queried
     * @return {@code true} only on API 17+ when the view resolves to RTL
     */
    public static boolean isLayoutRtl(View view) {
        if (Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1) {
            return view.getLayoutDirection() == View.LAYOUT_DIRECTION_RTL; // depends on control dependency: [if], data = [none]
        } else {
            // All layouts are LTR before JB MR1.
            return false; // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Tango device state query. Refreshes the cached {@code state}; on failure
     * the device is forced to UNKNOWN and the last assigned state is returned.
     */
    @Override
    public DevState state() {
        // Restore the per-device logging context before doing anything else.
        MDC.setContextMap(contextMap);
        xlogger.entry();
        try {
            state = getState();
        } catch (final DevFailed e) {
            try {
                // Degrade: drive the state machine to UNKNOWN and record the
                // failure text as the device status.
                stateImpl.stateMachine(DeviceState.UNKNOWN);
                statusImpl.statusMachine(DevFailedUtils.toString(e), DeviceState.UNKNOWN);
                state = DevState.UNKNOWN;
            } catch (final DevFailed e1) {
                // Secondary failure while degrading: log at debug and move on.
                logger.debug(NOT_IMPORTANT_ERROR, e1);
            }
            logger.debug(NOT_IMPORTANT_ERROR, e);
        }
        return state;
    }
}
public class class_name {
    /**
     * Tango device state query. Refreshes the cached {@code state}; on failure
     * the device is forced to UNKNOWN and the last assigned state is returned.
     */
    @Override
    public DevState state() {
        // Restore the per-device logging context before doing anything else.
        MDC.setContextMap(contextMap);
        xlogger.entry();
        try {
            state = getState(); // depends on control dependency: [try], data = [none]
        } catch (final DevFailed e) {
            try {
                // Degrade: drive the state machine to UNKNOWN and record the
                // failure text as the device status.
                stateImpl.stateMachine(DeviceState.UNKNOWN); // depends on control dependency: [try], data = [none]
                statusImpl.statusMachine(DevFailedUtils.toString(e), DeviceState.UNKNOWN); // depends on control dependency: [try], data = [none]
                state = DevState.UNKNOWN; // depends on control dependency: [try], data = [none]
            } catch (final DevFailed e1) {
                // Secondary failure while degrading: log at debug and move on.
                logger.debug(NOT_IMPORTANT_ERROR, e1);
            } // depends on control dependency: [catch], data = [none]
            logger.debug(NOT_IMPORTANT_ERROR, e);
        } // depends on control dependency: [catch], data = [none]
        return state;
    }
}
public class class_name {
    /**
     * Multi-get: fetches the values for all keys in one Jedis MGET round trip.
     * The borrowed connection is always returned to the pool.
     *
     * @param keys the keys to fetch, converted to bytes before the call
     * @return the decoded values, in the same order as {@code keys}
     */
    @SuppressWarnings("rawtypes")
    public List mget(Object... keys) {
        final Jedis jedis = getJedis();
        try {
            final List<byte[]> raw = jedis.mget(keysToBytesArray(keys));
            return valueListFromBytesList(raw);
        } finally {
            close(jedis);
        }
    }
}
public class class_name {
    /**
     * Multi-get: fetches the values for all keys in one Jedis MGET round trip.
     * The borrowed connection is always returned to the pool.
     *
     * @param keys the keys to fetch, converted to bytes before the call
     * @return the decoded values, in the same order as {@code keys}
     */
    @SuppressWarnings("rawtypes")
    public List mget(Object... keys) {
        Jedis jedis = getJedis();
        try {
            byte[][] keysBytesArray = keysToBytesArray(keys);
            List<byte[]> data = jedis.mget(keysBytesArray);
            return valueListFromBytesList(data); // depends on control dependency: [try], data = [none]
        } finally {close(jedis);}
    }
}
public class class_name {
    /**
     * Converts raw bytes to their lowercase hexadecimal representation,
     * two characters per byte, high nibble first.
     *
     * @param data the bytes to encode (an empty array yields an empty string)
     * @return the hex string, of length {@code data.length * 2}
     */
    private static String convertToHex(byte[] data) {
        // StringBuilder instead of the legacy synchronized StringBuffer,
        // presized because the output length is known up front; the original
        // do-while nibble trick is replaced by two straightforward appends.
        final StringBuilder buffer = new StringBuilder(data.length * 2);
        for (final byte b : data) {
            // Character.forDigit yields '0'-'9' and lowercase 'a'-'f',
            // matching the original '0' + n / 'a' + (n - 10) arithmetic.
            buffer.append(Character.forDigit((b >>> 4) & 0x0F, 16));
            buffer.append(Character.forDigit(b & 0x0F, 16));
        }
        return buffer.toString();
    }
}
public class class_name {
    /**
     * Converts raw bytes to their lowercase hexadecimal representation,
     * two characters per byte, high nibble first.
     *
     * @param data the bytes to encode (an empty array yields an empty string)
     * @return the hex string, of length {@code data.length * 2}
     */
    private static String convertToHex(byte[] data) {
        final StringBuffer buffer = new StringBuffer();
        for (int i = 0; i < data.length; i++) {
            int halfByte = (data[i] >>> 4) & 0x0F; // high nibble first
            int twoHalves = 0;
            // The do-while runs exactly twice: once for each nibble of the byte.
            do {
                if ((0 <= halfByte) && (halfByte <= 9)) {
                    buffer.append((char) ('0' + halfByte)); // depends on control dependency: [if], data = [none]
                } else {
                    buffer.append((char) ('a' + (halfByte - 10))); // depends on control dependency: [if], data = [none]
                }
                halfByte = data[i] & 0x0F; // switch to the low nibble for pass two
            } while(twoHalves++ < 1);
        }
        return buffer.toString();
    }
}
public class class_name {
    /**
     * Linear scan over the [startOffset, endOffset) record range for the
     * LoadClass record whose class object id matches.
     *
     * NOTE(review): assumes readLoadClassID advances offset[0] past each
     * record it reads — TODO confirm; otherwise this loop would not terminate.
     *
     * @param classObjectID the class object id to search for
     * @return a LoadClass positioned at the matching record, or {@code null}
     *         when no record in the range matches
     */
    LoadClass getClassByID(long classObjectID) {
        long[] offset = new long[] { startOffset };
        while (offset[0] < endOffset) {
            long start = offset[0]; // record start, captured before the read moves the cursor
            long classID = readLoadClassID(offset);
            if (classID == classObjectID) {
                return new LoadClass(this, start);
            }
        }
        return null;
    }
}
public class class_name {
    /**
     * Linear scan over the [startOffset, endOffset) record range for the
     * LoadClass record whose class object id matches.
     *
     * NOTE(review): assumes readLoadClassID advances offset[0] past each
     * record it reads — TODO confirm; otherwise this loop would not terminate.
     *
     * @param classObjectID the class object id to search for
     * @return a LoadClass positioned at the matching record, or {@code null}
     *         when no record in the range matches
     */
    LoadClass getClassByID(long classObjectID) {
        long[] offset = new long[] { startOffset };
        while (offset[0] < endOffset) {
            long start = offset[0]; // record start, captured before the read moves the cursor
            long classID = readLoadClassID(offset);
            if (classID == classObjectID) {
                return new LoadClass(this, start); // depends on control dependency: [if], data = [none]
            }
        }
        return null;
    }
}
public class class_name { boolean loadCandidateLibrary(String aLibraryName, Long aMajorVersion, List<String> aLibCandidates) { boolean retval = false; for (String candidate : aLibCandidates) { log .trace( "Attempt: library load of library: {}; version: {}: relative path: {}", new Object[] { aLibraryName, aMajorVersion == null ? "<unspecified>" : aMajorVersion .longValue(), candidate }); File candidateFile = new File(candidate); if (candidateFile.exists()) { String absPath = candidateFile.getAbsolutePath(); try { log .trace( "Attempt: library load of library: {}; version: {}: absolute path: {}", new Object[] { aLibraryName, aMajorVersion == null ? "<unspecified>" : aMajorVersion .longValue(), absPath }); // Here's where we attempt the actual load. System.load(absPath); log .trace( "Success: library load of library: {}; version: {}: absolute path: {}", new Object[] { aLibraryName, aMajorVersion == null ? "<unspecified>" : aMajorVersion .longValue(), absPath }); // if we got here, we loaded successfully setLoadedLibrary(aLibraryName, aMajorVersion); retval = true; break; } catch (UnsatisfiedLinkError e) { log .warn( "Failure: library load of library: {}; version: {}: absolute path: {}; error: {}", new Object[] { aLibraryName, aMajorVersion == null ? "<unspecified>" : aMajorVersion .longValue(), absPath, e }); } catch (SecurityException e) { log .warn( "Failure: library load of library: {}; version: {}: absolute path: {}; error: {}", new Object[] { aLibraryName, aMajorVersion == null ? "<unspecified>" : aMajorVersion .longValue(), absPath, e }); } } } return retval; } }
public class class_name {
    /**
     * Tries each candidate path in order and loads the first one that exists
     * on disk and links successfully via {@link System#load(String)}. The
     * loaded library is recorded via setLoadedLibrary and the search stops at
     * the first success; link and security failures are logged and the next
     * candidate is tried.
     *
     * @param aLibraryName   logical name of the library being loaded
     * @param aMajorVersion  requested major version, or {@code null} when unspecified
     * @param aLibCandidates candidate file paths, tried in order
     * @return {@code true} if some candidate was loaded, {@code false} otherwise
     */
    boolean loadCandidateLibrary(String aLibraryName, Long aMajorVersion,
            List<String> aLibCandidates) {
        boolean retval = false;
        for (String candidate : aLibCandidates) {
            log
                .trace(
                    "Attempt: library load of library: {}; version: {}: relative path: {}", // depends on control dependency: [for], data = [none]
                    new Object[] { aLibraryName,
                        aMajorVersion == null ? "<unspecified>" : aMajorVersion
                            .longValue(), candidate });
            File candidateFile = new File(candidate);
            if (candidateFile.exists()) {
                String absPath = candidateFile.getAbsolutePath();
                try {
                    log
                        .trace(
                            "Attempt: library load of library: {}; version: {}: absolute path: {}",
                            new Object[] { aLibraryName,
                                aMajorVersion == null ? "<unspecified>" : aMajorVersion
                                    .longValue(), absPath }); // depends on control dependency: [if], data = [none]
                    // Here's where we attempt the actual load.
                    System.load(absPath); // depends on control dependency: [if], data = [none]
                    log
                        .trace(
                            "Success: library load of library: {}; version: {}: absolute path: {}", // depends on control dependency: [if], data = [none]
                            new Object[] { aLibraryName,
                                aMajorVersion == null ? "<unspecified>" : aMajorVersion
                                    .longValue(), absPath });
                    // if we got here, we loaded successfully
                    setLoadedLibrary(aLibraryName, aMajorVersion);
                    retval = true;
                    break;
                } catch (UnsatisfiedLinkError e) {
                    log
                        .warn(
                            "Failure: library load of library: {}; version: {}: absolute path: {}; error: {}",
                            new Object[] { aLibraryName,
                                aMajorVersion == null ? "<unspecified>" : aMajorVersion
                                    .longValue(), absPath, e });
                } catch (SecurityException e) {
                    log
                        .warn(
                            "Failure: library load of library: {}; version: {}: absolute path: {}; error: {}",
                            new Object[] { aLibraryName,
                                aMajorVersion == null ? "<unspecified>" : aMajorVersion
                                    .longValue(), absPath, e });
                }
            }
        }
        return retval;
    }
}
public class class_name {
    /**
     * Builds the complete set of Standard Dialect processors for every
     * template mode (HTML, XML, TEXT, JAVASCRIPT, CSS; RAW has none), bound
     * to the given dialect prefix. Insertion order is preserved by the
     * LinkedHashSet.
     *
     * @param dialectPrefix the attribute/element prefix (e.g. "th")
     * @return a freshly created set of processor instances
     */
    public static Set<IProcessor> createStandardProcessorsSet(final String dialectPrefix) {
        /*
         * It is important that we create new instances here because, if there are
         * several dialects in the TemplateEngine that extend StandardDialect, they should
         * not be returning the exact same instances for their processors in order
         * to allow specific instances to be directly linked with their owner dialect.
         */
        final Set<IProcessor> processors = new LinkedHashSet<IProcessor>();

        /*
         * ------------------------
         * HTML TEMPLATE MODE
         * ------------------------
         */

        /*
         * HTML: ATTRIBUTE TAG PROCESSORS
         */
        processors.add(new StandardActionTagProcessor(dialectPrefix));
        processors.add(new StandardAltTitleTagProcessor(dialectPrefix));
        processors.add(new StandardAssertTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardAttrTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardAttrappendTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardAttrprependTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardCaseTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardClassappendTagProcessor(dialectPrefix));
        for (final String attrName : StandardConditionalFixedValueTagProcessor.ATTR_NAMES) {
            processors.add(new StandardConditionalFixedValueTagProcessor(dialectPrefix, attrName));
        }
        for (final String attrName : StandardDOMEventAttributeTagProcessor.ATTR_NAMES) {
            processors.add(new StandardDOMEventAttributeTagProcessor(dialectPrefix, attrName));
        }
        processors.add(new StandardEachTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardFragmentTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardHrefTagProcessor(dialectPrefix));
        processors.add(new StandardIfTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardIncludeTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardInlineHTMLTagProcessor(dialectPrefix));
        processors.add(new StandardInsertTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardLangXmlLangTagProcessor(dialectPrefix));
        processors.add(new StandardMethodTagProcessor(dialectPrefix));
        for (final String attrName : StandardNonRemovableAttributeTagProcessor.ATTR_NAMES) {
            processors.add(new StandardNonRemovableAttributeTagProcessor(dialectPrefix, attrName));
        }
        processors.add(new StandardObjectTagProcessor(TemplateMode.HTML, dialectPrefix));
        for (final String attrName : StandardRemovableAttributeTagProcessor.ATTR_NAMES) {
            processors.add(new StandardRemovableAttributeTagProcessor(dialectPrefix, attrName));
        }
        processors.add(new StandardRemoveTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardReplaceTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardSrcTagProcessor(dialectPrefix));
        processors.add(new StandardStyleappendTagProcessor(dialectPrefix));
        processors.add(new StandardSubstituteByTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardSwitchTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardTextTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardUnlessTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardUtextTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardValueTagProcessor(dialectPrefix));
        processors.add(new StandardWithTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardXmlBaseTagProcessor(dialectPrefix));
        processors.add(new StandardXmlLangTagProcessor(dialectPrefix));
        processors.add(new StandardXmlSpaceTagProcessor(dialectPrefix));
        processors.add(new StandardXmlNsTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardRefAttributeTagProcessor(TemplateMode.HTML, dialectPrefix));
        processors.add(new StandardDefaultAttributesTagProcessor(TemplateMode.HTML, dialectPrefix));

        /*
         * HTML: ELEMENT TAG PROCESSORS
         */
        processors.add(new StandardBlockTagProcessor(TemplateMode.HTML, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME));

        /*
         * HTML: TEXT PROCESSORS
         *
         * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating
         * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each
         * template mode in the StandardDialect (see AbstractStandardInliner for details). So if new processors
         * are added here, it should be for a really compelling reason.
         * See EngineConfiguration#isModelReshapable()
         */
        processors.add(new StandardInliningTextProcessor(TemplateMode.HTML));

        /*
         * HTML: CDATASection PROCESSORS
         *
         * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable.
         * See EngineConfiguration#isModelReshapable()
         */
        processors.add(new StandardInliningCDATASectionProcessor(TemplateMode.HTML));

        /*
         * HTML: DOCTYPE PROCESSORS
         */
        processors.add(new StandardTranslationDocTypeProcessor());

        /*
         * HTML: COMMENT PROCESSORS
         *
         * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable.
         * See EngineConfiguration#isModelReshapable()
         */
        processors.add(new StandardInliningCommentProcessor(TemplateMode.HTML));
        processors.add(new StandardConditionalCommentProcessor());

        /*
         * HTML: TEMPLATE BOUNDARIES PROCESSORS
         */
        processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.HTML));

        /*
         * ------------------------
         * XML TEMPLATE MODE
         * ------------------------
         */

        /*
         * XML: ATTRIBUTE TAG PROCESSORS
         */
        processors.add(new StandardAssertTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardAttrTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardAttrappendTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardAttrprependTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardCaseTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardEachTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardFragmentTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardIfTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardIncludeTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardInlineXMLTagProcessor(dialectPrefix));
        processors.add(new StandardInsertTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardObjectTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardRemoveTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardReplaceTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardSubstituteByTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardSwitchTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardTextTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardUnlessTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardUtextTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardWithTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardXmlNsTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardRefAttributeTagProcessor(TemplateMode.XML, dialectPrefix));
        processors.add(new StandardDefaultAttributesTagProcessor(TemplateMode.XML, dialectPrefix));

        /*
         * XML: ELEMENT TAG PROCESSORS
         */
        processors.add(new StandardBlockTagProcessor(TemplateMode.XML, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME));

        /*
         * XML: TEXT PROCESSORS
         *
         * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating
         * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode
         * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here,
         * it should be for a really compelling reason.
         * See EngineConfiguration#isModelReshapable()
         */
        processors.add(new StandardInliningTextProcessor(TemplateMode.XML));

        /*
         * XML: CDATASection PROCESSORS
         *
         * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable.
         * See EngineConfiguration#isModelReshapable()
         */
        processors.add(new StandardInliningCDATASectionProcessor(TemplateMode.XML));

        /*
         * XML: COMMENT PROCESSORS
         *
         * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable.
         * See EngineConfiguration#isModelReshapable()
         */
        processors.add(new StandardInliningCommentProcessor(TemplateMode.XML));

        /*
         * XML: TEMPLATE BOUNDARIES PROCESSORS
         */
        processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.XML));

        /*
         * ------------------------
         * TEXT TEMPLATE MODE
         * ------------------------
         */

        /*
         * TEXT: ATTRIBUTE TAG PROCESSORS
         */
        processors.add(new StandardAssertTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardCaseTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardEachTagProcessor(TemplateMode.TEXT, dialectPrefix));
        // No th:fragment attribute in text modes: no fragment selection available!
        processors.add(new StandardIfTagProcessor(TemplateMode.TEXT, dialectPrefix));
        // No th:include to be added here, as it is already deprecated since 3.0
        processors.add(new StandardInlineTextualTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardInsertTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardObjectTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardRemoveTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardReplaceTagProcessor(TemplateMode.TEXT, dialectPrefix));
        // No th:substituteby to be added here, as it is already deprecated since 2.1
        processors.add(new StandardSwitchTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardTextTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardUnlessTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardUtextTagProcessor(TemplateMode.TEXT, dialectPrefix));
        processors.add(new StandardWithTagProcessor(TemplateMode.TEXT, dialectPrefix));

        /*
         * TEXT: ELEMENT TAG PROCESSORS
         */
        processors.add(new StandardBlockTagProcessor(TemplateMode.TEXT, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME));
        processors.add(new StandardBlockTagProcessor(TemplateMode.TEXT, null, "")); // With no name, will process [# th....] elements

        /*
         * TEXT: TEXT PROCESSORS
         *
         * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating
         * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode
         * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here,
         * it should be for a really compelling reason.
         */
        processors.add(new StandardInliningTextProcessor(TemplateMode.TEXT));

        /*
         * TEXT: TEMPLATE BOUNDARIES PROCESSORS
         */
        processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.TEXT));

        /*
         * ------------------------
         * JAVASCRIPT TEMPLATE MODE
         * ------------------------
         */

        /*
         * JAVASCRIPT: ATTRIBUTE TAG PROCESSORS
         */
        processors.add(new StandardAssertTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardCaseTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardEachTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        // No th:fragment attribute in text modes: no fragment selection available!
        processors.add(new StandardIfTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        // No th:include to be added here, as it is already deprecated since 3.0
        processors.add(new StandardInlineTextualTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardInsertTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardObjectTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardRemoveTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardReplaceTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        // No th:substituteby to be added here, as it is already deprecated since 2.1
        processors.add(new StandardSwitchTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardTextTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardUnlessTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardUtextTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));
        processors.add(new StandardWithTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix));

        /*
         * JAVASCRIPT: ELEMENT TAG PROCESSORS
         */
        processors.add(new StandardBlockTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME));
        processors.add(new StandardBlockTagProcessor(TemplateMode.JAVASCRIPT, null, "")); // With no name, will process [# th....] elements

        /*
         * JAVASCRIPT: TEXT PROCESSORS
         *
         * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating
         * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode
         * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here,
         * it should be for a really compelling reason.
         */
        processors.add(new StandardInliningTextProcessor(TemplateMode.JAVASCRIPT));

        /*
         * JAVASCRIPT: TEMPLATE BOUNDARIES PROCESSORS
         */
        processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.JAVASCRIPT));

        /*
         * ------------------------
         * CSS TEMPLATE MODE
         * ------------------------
         */

        /*
         * CSS: ATTRIBUTE TAG PROCESSORS
         */
        processors.add(new StandardAssertTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardCaseTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardEachTagProcessor(TemplateMode.CSS, dialectPrefix));
        // No th:fragment attribute in text modes: no fragment selection available!
        processors.add(new StandardIfTagProcessor(TemplateMode.CSS, dialectPrefix));
        // No th:include to be added here, as it is already deprecated since 3.0
        processors.add(new StandardInlineTextualTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardInsertTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardObjectTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardRemoveTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardReplaceTagProcessor(TemplateMode.CSS, dialectPrefix));
        // No th:substituteby to be added here, as it is already deprecated since 2.1
        processors.add(new StandardSwitchTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardTextTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardUnlessTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardUtextTagProcessor(TemplateMode.CSS, dialectPrefix));
        processors.add(new StandardWithTagProcessor(TemplateMode.CSS, dialectPrefix));

        /*
         * CSS: ELEMENT TAG PROCESSORS
         */
        processors.add(new StandardBlockTagProcessor(TemplateMode.CSS, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME));
        processors.add(new StandardBlockTagProcessor(TemplateMode.CSS, null, "")); // With no name, will process [# th....] elements

        /*
         * CSS: TEXT PROCESSORS
         *
         * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating
         * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode
         * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here,
         * it should be for a really compelling reason.
         */
        processors.add(new StandardInliningTextProcessor(TemplateMode.CSS));

        /*
         * CSS: TEMPLATE BOUNDARIES PROCESSORS
         */
        processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.CSS));

        /*
         * ------------------------
         * RAW TEMPLATE MODE
         * ------------------------
         */
        // No processors defined for template mode. Note only TextProcessors would be possible in this template mode,
        // given the entire templates are considered Text.

        return processors;
    }
}
public class class_name { public static Set<IProcessor> createStandardProcessorsSet(final String dialectPrefix) { /* * It is important that we create new instances here because, if there are * several dialects in the TemplateEngine that extend StandardDialect, they should * not be returning the exact same instances for their processors in order * to allow specific instances to be directly linked with their owner dialect. */ final Set<IProcessor> processors = new LinkedHashSet<IProcessor>(); /* * ------------------------ * ------------------------ * HTML TEMPLATE MODE * ------------------------ * ------------------------ */ /* * HTML: ATTRIBUTE TAG PROCESSORS */ processors.add(new StandardActionTagProcessor(dialectPrefix)); processors.add(new StandardAltTitleTagProcessor(dialectPrefix)); processors.add(new StandardAssertTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardAttrTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardAttrappendTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardAttrprependTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardCaseTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardClassappendTagProcessor(dialectPrefix)); for (final String attrName : StandardConditionalFixedValueTagProcessor.ATTR_NAMES) { processors.add(new StandardConditionalFixedValueTagProcessor(dialectPrefix, attrName)); // depends on control dependency: [for], data = [attrName] } for (final String attrName : StandardDOMEventAttributeTagProcessor.ATTR_NAMES) { processors.add(new StandardDOMEventAttributeTagProcessor(dialectPrefix, attrName)); // depends on control dependency: [for], data = [attrName] } processors.add(new StandardEachTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardFragmentTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardHrefTagProcessor(dialectPrefix)); processors.add(new 
StandardIfTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardIncludeTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardInlineHTMLTagProcessor(dialectPrefix)); processors.add(new StandardInsertTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardLangXmlLangTagProcessor(dialectPrefix)); processors.add(new StandardMethodTagProcessor(dialectPrefix)); for (final String attrName : StandardNonRemovableAttributeTagProcessor.ATTR_NAMES) { processors.add(new StandardNonRemovableAttributeTagProcessor(dialectPrefix, attrName)); // depends on control dependency: [for], data = [attrName] } processors.add(new StandardObjectTagProcessor(TemplateMode.HTML, dialectPrefix)); for (final String attrName : StandardRemovableAttributeTagProcessor.ATTR_NAMES) { processors.add(new StandardRemovableAttributeTagProcessor(dialectPrefix, attrName)); // depends on control dependency: [for], data = [attrName] } processors.add(new StandardRemoveTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardReplaceTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardSrcTagProcessor(dialectPrefix)); processors.add(new StandardStyleappendTagProcessor(dialectPrefix)); processors.add(new StandardSubstituteByTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardSwitchTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardTextTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardUnlessTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardUtextTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardValueTagProcessor(dialectPrefix)); processors.add(new StandardWithTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardXmlBaseTagProcessor(dialectPrefix)); processors.add(new StandardXmlLangTagProcessor(dialectPrefix)); processors.add(new 
StandardXmlSpaceTagProcessor(dialectPrefix)); processors.add(new StandardXmlNsTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardRefAttributeTagProcessor(TemplateMode.HTML, dialectPrefix)); processors.add(new StandardDefaultAttributesTagProcessor(TemplateMode.HTML, dialectPrefix)); /* * HTML: ELEMENT TAG PROCESSORS */ processors.add(new StandardBlockTagProcessor(TemplateMode.HTML, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME)); /* * HTML: TEXT PROCESSORS * * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each * template mode in the StandardDialect (see AbstractStandardInliner for details). So if new processors * are added here, it should be for a really compelling reason. * See EngineConfiguration#isModelReshapable() */ processors.add(new StandardInliningTextProcessor(TemplateMode.HTML)); /* * HTML: CDATASection PROCESSORS * * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable. * See EngineConfiguration#isModelReshapable() */ processors.add(new StandardInliningCDATASectionProcessor(TemplateMode.HTML)); /* * HTML: DOCTYPE PROCESSORS */ processors.add(new StandardTranslationDocTypeProcessor()); /* * HTML: COMMENT PROCESSORS * * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable. 
* See EngineConfiguration#isModelReshapable() */ processors.add(new StandardInliningCommentProcessor(TemplateMode.HTML)); processors.add(new StandardConditionalCommentProcessor()); /* * HTML: TEMPLATE BOUNDARIES PROCESSORS */ processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.HTML)); /* * ------------------------ * ------------------------ * XML TEMPLATE MODE * ------------------------ * ------------------------ */ /* * XML: ATTRIBUTE TAG PROCESSORS */ processors.add(new StandardAssertTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardAttrTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardAttrappendTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardAttrprependTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardCaseTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardEachTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardFragmentTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardIfTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardIncludeTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardInlineXMLTagProcessor(dialectPrefix)); processors.add(new StandardInsertTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardObjectTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardRemoveTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardReplaceTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardSubstituteByTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardSwitchTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardTextTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardUnlessTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new 
StandardUtextTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardWithTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardXmlNsTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardRefAttributeTagProcessor(TemplateMode.XML, dialectPrefix)); processors.add(new StandardDefaultAttributesTagProcessor(TemplateMode.XML, dialectPrefix)); /* * XML: ELEMENT TAG PROCESSORS */ processors.add(new StandardBlockTagProcessor(TemplateMode.XML, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME)); /* * XML: TEXT PROCESSORS * * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here, * it should be for a really compelling reason. * See EngineConfiguration#isModelReshapable() */ processors.add(new StandardInliningTextProcessor(TemplateMode.XML)); /* * XML: CDATASection PROCESSORS * * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable. * See EngineConfiguration#isModelReshapable() */ processors.add(new StandardInliningCDATASectionProcessor(TemplateMode.XML)); /* * XML: COMMENT PROCESSORS * * NOTE as happens with text processors, adding a processor here would convert models in non-reshapable. 
* See EngineConfiguration#isModelReshapable() */ processors.add(new StandardInliningCommentProcessor(TemplateMode.XML)); /* * XML: TEMPLATE BOUNDARIES PROCESSORS */ processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.XML)); /* * ------------------------ * ------------------------ * TEXT TEMPLATE MODE * ------------------------ * ------------------------ */ /* * TEXT: ATTRIBUTE TAG PROCESSORS */ processors.add(new StandardAssertTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardCaseTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardEachTagProcessor(TemplateMode.TEXT, dialectPrefix)); // No th:fragment attribute in text modes: no fragment selection available! processors.add(new StandardIfTagProcessor(TemplateMode.TEXT, dialectPrefix)); // No th:include to be added here, as it is already deprecated since 3.0 processors.add(new StandardInlineTextualTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardInsertTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardObjectTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardRemoveTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardReplaceTagProcessor(TemplateMode.TEXT, dialectPrefix)); // No th:substituteby to be added here, as it is already deprecated since 2.1 processors.add(new StandardSwitchTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardTextTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardUnlessTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardUtextTagProcessor(TemplateMode.TEXT, dialectPrefix)); processors.add(new StandardWithTagProcessor(TemplateMode.TEXT, dialectPrefix)); /* * TEXT: ELEMENT TAG PROCESSORS */ processors.add(new StandardBlockTagProcessor(TemplateMode.TEXT, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME)); processors.add(new 
StandardBlockTagProcessor(TemplateMode.TEXT, null, "")); // With no name, will process [# th....] elements /* * TEXT: TEXT PROCESSORS * * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here, * it should be for a really compelling reason. */ processors.add(new StandardInliningTextProcessor(TemplateMode.TEXT)); /* * TEXT: TEMPLATE BOUNDARIES PROCESSORS */ processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.TEXT)); /* * ------------------------ * ------------------------ * JAVASCRIPT TEMPLATE MODE * ------------------------ * ------------------------ */ /* * JAVASCRIPT: ATTRIBUTE TAG PROCESSORS */ processors.add(new StandardAssertTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardCaseTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardEachTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); // No th:fragment attribute in text modes: no fragment selection available! 
processors.add(new StandardIfTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); // No th:include to be added here, as it is already deprecated since 3.0 processors.add(new StandardInlineTextualTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardInsertTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardObjectTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardRemoveTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardReplaceTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); // No th:substituteby to be added here, as it is already deprecated since 2.1 processors.add(new StandardSwitchTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardTextTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardUnlessTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardUtextTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); processors.add(new StandardWithTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix)); /* * JAVASCRIPT: ELEMENT TAG PROCESSORS */ processors.add(new StandardBlockTagProcessor(TemplateMode.JAVASCRIPT, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME)); processors.add(new StandardBlockTagProcessor(TemplateMode.JAVASCRIPT, null, "")); // With no name, will process [# th....] elements /* * JAVASCRIPT: TEXT PROCESSORS * * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here, * it should be for a really compelling reason. 
*/ processors.add(new StandardInliningTextProcessor(TemplateMode.JAVASCRIPT)); /* * JAVASCRIPT: TEMPLATE BOUNDARIES PROCESSORS */ processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.JAVASCRIPT)); /* * ------------------------ * ------------------------ * CSS TEMPLATE MODE * ------------------------ * ------------------------ */ /* * CSS: ATTRIBUTE TAG PROCESSORS */ processors.add(new StandardAssertTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardCaseTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardEachTagProcessor(TemplateMode.CSS, dialectPrefix)); // No th:fragment attribute in text modes: no fragment selection available! processors.add(new StandardIfTagProcessor(TemplateMode.CSS, dialectPrefix)); // No th:include to be added here, as it is already deprecated since 3.0 processors.add(new StandardInlineTextualTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardInsertTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardObjectTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardRemoveTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardReplaceTagProcessor(TemplateMode.CSS, dialectPrefix)); // No th:substituteby to be added here, as it is already deprecated since 2.1 processors.add(new StandardSwitchTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardTextTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardUnlessTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardUtextTagProcessor(TemplateMode.CSS, dialectPrefix)); processors.add(new StandardWithTagProcessor(TemplateMode.CSS, dialectPrefix)); /* * CSS: ELEMENT TAG PROCESSORS */ processors.add(new StandardBlockTagProcessor(TemplateMode.CSS, dialectPrefix, StandardBlockTagProcessor.ELEMENT_NAME)); processors.add(new StandardBlockTagProcessor(TemplateMode.CSS, null, "")); // With no 
name, will process [# th....] elements /* * CSS: TEXT PROCESSORS * * NOTE the ability of the Standard Inlining mechanism to directly write to output instead of generating * internal Strings relies on the fact that there is only ONE ITextProcessor instance for each template mode * in the StandardDialect (see AbstractStandardInliner for details). So if new processors are added here, * it should be for a really compelling reason. */ processors.add(new StandardInliningTextProcessor(TemplateMode.CSS)); /* * CSS: TEMPLATE BOUNDARIES PROCESSORS */ processors.add(new StandardInlineEnablementTemplateBoundariesProcessor(TemplateMode.CSS)); /* * ------------------------ * ------------------------ * RAW TEMPLATE MODE * ------------------------ * ------------------------ */ // No processors defined for template mode. Note only TextProcessors would be possible in this template mode, // given the entire templates are considered Text. return processors; } }
public class class_name { public URIBuilder appendDateTime(URIBuilder builder, Interval<DateUnit> dateInterval, Interval<TimeUnit> timeInterval) { if (dateInterval != null || timeInterval != null) { ObjectNode dateTimeNode = new ObjectMapper().createObjectNode(); if (dateInterval != null) { dateTimeNode.setAll(dateInterval.toJson()); } if (timeInterval != null) { dateTimeNode.setAll(timeInterval.toJson()); } builder.addParameter("dateTimeDefinition", dateTimeNode.toString()); } return builder; } }
public class class_name { public URIBuilder appendDateTime(URIBuilder builder, Interval<DateUnit> dateInterval, Interval<TimeUnit> timeInterval) { if (dateInterval != null || timeInterval != null) { ObjectNode dateTimeNode = new ObjectMapper().createObjectNode(); if (dateInterval != null) { dateTimeNode.setAll(dateInterval.toJson()); // depends on control dependency: [if], data = [(dateInterval] } if (timeInterval != null) { dateTimeNode.setAll(timeInterval.toJson()); // depends on control dependency: [if], data = [(timeInterval] } builder.addParameter("dateTimeDefinition", dateTimeNode.toString()); // depends on control dependency: [if], data = [none] } return builder; } }
public class class_name { public Set<String> extractColumnsToInsertOrUpdate(final JQLContext jqlContext, String jqlValue, final Finder<SQLProperty> entity) { final Set<String> result = new LinkedHashSet<String>(); final One<Boolean> selectionOn = new One<Boolean>(null); final One<Boolean> insertOn = new One<Boolean>(null); // Column_name_set is needed for insert // Columns_to_update is needed for update analyzeInternal(jqlContext, jqlValue, new JqlBaseListener() { @Override public void enterColumn_name_set(Column_name_setContext ctx) { if (insertOn.value0 == null) { insertOn.value0 = true; } } @Override public void exitColumn_name_set(Column_name_setContext ctx) { insertOn.value0 = false; } @Override public void enterColumns_to_update(Columns_to_updateContext ctx) { if (selectionOn.value0 == null) { selectionOn.value0 = true; } } @Override public void exitColumns_to_update(Columns_to_updateContext ctx) { selectionOn.value0 = false; } @Override public void enterColumn_name(Column_nameContext ctx) { // works for INSERTS if (insertOn.value0 != null && insertOn.value0 == true) { result.add(ctx.getText()); } } @Override public void enterColumn_name_to_update(Column_name_to_updateContext ctx) { result.add(ctx.getText()); } }); return result; } }
public class class_name { public Set<String> extractColumnsToInsertOrUpdate(final JQLContext jqlContext, String jqlValue, final Finder<SQLProperty> entity) { final Set<String> result = new LinkedHashSet<String>(); final One<Boolean> selectionOn = new One<Boolean>(null); final One<Boolean> insertOn = new One<Boolean>(null); // Column_name_set is needed for insert // Columns_to_update is needed for update analyzeInternal(jqlContext, jqlValue, new JqlBaseListener() { @Override public void enterColumn_name_set(Column_name_setContext ctx) { if (insertOn.value0 == null) { insertOn.value0 = true; // depends on control dependency: [if], data = [none] } } @Override public void exitColumn_name_set(Column_name_setContext ctx) { insertOn.value0 = false; } @Override public void enterColumns_to_update(Columns_to_updateContext ctx) { if (selectionOn.value0 == null) { selectionOn.value0 = true; // depends on control dependency: [if], data = [none] } } @Override public void exitColumns_to_update(Columns_to_updateContext ctx) { selectionOn.value0 = false; } @Override public void enterColumn_name(Column_nameContext ctx) { // works for INSERTS if (insertOn.value0 != null && insertOn.value0 == true) { result.add(ctx.getText()); // depends on control dependency: [if], data = [none] } } @Override public void enterColumn_name_to_update(Column_name_to_updateContext ctx) { result.add(ctx.getText()); } }); return result; } }
public class class_name { public int[] getNonZeroIndices() { if (nonZeroIndices == null) { nonZeroIndices = vector.keys(); Arrays.sort(nonZeroIndices); } return nonZeroIndices; } }
public class class_name { public int[] getNonZeroIndices() { if (nonZeroIndices == null) { nonZeroIndices = vector.keys(); // depends on control dependency: [if], data = [none] Arrays.sort(nonZeroIndices); // depends on control dependency: [if], data = [(nonZeroIndices] } return nonZeroIndices; } }
public class class_name { private CompletableFuture<Void> flush(Void ignored) { checkRunning(); long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "flush"); // Flush everything we can flush. val flushFutures = this.processors.values().stream() .filter(ProcessorCollection::mustFlush) .map(a -> a.flush(this.config.getFlushTimeout())) .collect(Collectors.toList()); return Futures .allOfWithResults(flushFutures) .thenAcceptAsync(flushResults -> { FlushStageResult result = new FlushStageResult(); flushResults.forEach(result::withFlushResult); if (result.getFlushedBytes() + result.getMergedBytes() + result.count > 0) { logStageEvent("Flush", result); } LoggerHelpers.traceLeave(log, this.traceObjectId, "flush", traceId); }, this.executor); } }
public class class_name { private CompletableFuture<Void> flush(Void ignored) { checkRunning(); long traceId = LoggerHelpers.traceEnterWithContext(log, this.traceObjectId, "flush"); // Flush everything we can flush. val flushFutures = this.processors.values().stream() .filter(ProcessorCollection::mustFlush) .map(a -> a.flush(this.config.getFlushTimeout())) .collect(Collectors.toList()); return Futures .allOfWithResults(flushFutures) .thenAcceptAsync(flushResults -> { FlushStageResult result = new FlushStageResult(); flushResults.forEach(result::withFlushResult); if (result.getFlushedBytes() + result.getMergedBytes() + result.count > 0) { logStageEvent("Flush", result); // depends on control dependency: [if], data = [none] } LoggerHelpers.traceLeave(log, this.traceObjectId, "flush", traceId); }, this.executor); } }
public class class_name { private void processFields() { List<Field> children = IntrospectionUtils.getPersistableFields(clazz); for (Field child : children) { if (child.isAnnotationPresent(Embedded.class)) { processEmbeddedField(child); } else { processSimpleField(child); } } } }
public class class_name { private void processFields() { List<Field> children = IntrospectionUtils.getPersistableFields(clazz); for (Field child : children) { if (child.isAnnotationPresent(Embedded.class)) { processEmbeddedField(child); // depends on control dependency: [if], data = [none] } else { processSimpleField(child); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public Matrix times(Matrix B) { Matrix A = this; if (A.N != B.M) { throw new RuntimeException("Illegal matrix dimensions."); } Matrix C = new Matrix(A.M, B.N); for (int i = 0; i < C.M; i++) { for (int j = 0; j < C.N; j++) { for (int k = 0; k < A.N; k++) { C.data[i][j] += A.data[i][k] * B.data[k][j]; } } } return C; } }
public class class_name { public Matrix times(Matrix B) { Matrix A = this; if (A.N != B.M) { throw new RuntimeException("Illegal matrix dimensions."); } Matrix C = new Matrix(A.M, B.N); for (int i = 0; i < C.M; i++) { for (int j = 0; j < C.N; j++) { for (int k = 0; k < A.N; k++) { C.data[i][j] += A.data[i][k] * B.data[k][j]; // depends on control dependency: [for], data = [k] } } } return C; } }
public class class_name { private void initMacroContent() throws CmsException { I_CmsFormatterBean formatterConfig = OpenCms.getADEManager().getCachedFormatters( m_cms.getRequestContext().getCurrentProject().isOnlineProject()).getFormatters().get( m_element.getFormatterId()); if (formatterConfig instanceof CmsMacroFormatterBean) { CmsMacroFormatterBean config = (CmsMacroFormatterBean)formatterConfig; m_input = config.getMacroInput(); m_formatterReferences = config.getReferencedFormatters(); if (m_element.isInMemoryOnly()) { if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(config.getPlaceholderMacroInput())) { m_input = config.getPlaceholderMacroInput(); } if (config.getDefaultContentStructureId() != null) { try { CmsResource defaultContent = m_cms.readResource( ((CmsMacroFormatterBean)formatterConfig).getDefaultContentStructureId()); CmsFile defaultFile = m_cms.readFile(defaultContent); m_element = new CmsContainerElementBean( defaultFile, m_element.getFormatterId(), m_element.getIndividualSettings(), true, m_element.editorHash(), m_element.isCreateNew()); } catch (CmsException e) { LOG.error("Error reading default content for new resource", e); } } } } else { // only as a fall back, should not be used m_formatterReferences = new HashMap<String, CmsUUID>(); CmsResource macroContent = m_cms.readResource(m_element.getFormatterId()); CmsXmlContent xmlContent = CmsXmlContentFactory.unmarshal(m_cms, macroContent, m_request); m_input = xmlContent.getStringValue(m_cms, CmsFormatterBeanParser.N_MACRO, CmsLocaleManager.MASTER_LOCALE); List<I_CmsXmlContentValue> formatters = xmlContent.getValues( CmsFormatterBeanParser.N_FORMATTERS, CmsLocaleManager.MASTER_LOCALE); for (I_CmsXmlContentValue formatterValue : formatters) { CmsXmlVfsFileValue file = (CmsXmlVfsFileValue)xmlContent.getValue( formatterValue.getPath() + "/" + CmsFormatterBeanParser.N_FORMATTER, CmsLocaleManager.MASTER_LOCALE); String macroName = xmlContent.getStringValue( m_cms, formatterValue.getPath() + "/" + 
CmsFormatterBeanParser.N_MACRO_NAME, CmsLocaleManager.MASTER_LOCALE); m_formatterReferences.put(macroName, file.getLink(m_cms).getStructureId()); } } } }
public class class_name { private void initMacroContent() throws CmsException { I_CmsFormatterBean formatterConfig = OpenCms.getADEManager().getCachedFormatters( m_cms.getRequestContext().getCurrentProject().isOnlineProject()).getFormatters().get( m_element.getFormatterId()); if (formatterConfig instanceof CmsMacroFormatterBean) { CmsMacroFormatterBean config = (CmsMacroFormatterBean)formatterConfig; m_input = config.getMacroInput(); m_formatterReferences = config.getReferencedFormatters(); if (m_element.isInMemoryOnly()) { if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(config.getPlaceholderMacroInput())) { m_input = config.getPlaceholderMacroInput(); // depends on control dependency: [if], data = [none] } if (config.getDefaultContentStructureId() != null) { try { CmsResource defaultContent = m_cms.readResource( ((CmsMacroFormatterBean)formatterConfig).getDefaultContentStructureId()); CmsFile defaultFile = m_cms.readFile(defaultContent); m_element = new CmsContainerElementBean( defaultFile, m_element.getFormatterId(), m_element.getIndividualSettings(), true, m_element.editorHash(), m_element.isCreateNew()); // depends on control dependency: [try], data = [none] } catch (CmsException e) { LOG.error("Error reading default content for new resource", e); } // depends on control dependency: [catch], data = [none] } } } else { // only as a fall back, should not be used m_formatterReferences = new HashMap<String, CmsUUID>(); CmsResource macroContent = m_cms.readResource(m_element.getFormatterId()); CmsXmlContent xmlContent = CmsXmlContentFactory.unmarshal(m_cms, macroContent, m_request); m_input = xmlContent.getStringValue(m_cms, CmsFormatterBeanParser.N_MACRO, CmsLocaleManager.MASTER_LOCALE); List<I_CmsXmlContentValue> formatters = xmlContent.getValues( CmsFormatterBeanParser.N_FORMATTERS, CmsLocaleManager.MASTER_LOCALE); for (I_CmsXmlContentValue formatterValue : formatters) { CmsXmlVfsFileValue file = (CmsXmlVfsFileValue)xmlContent.getValue( formatterValue.getPath() + 
"/" + CmsFormatterBeanParser.N_FORMATTER, CmsLocaleManager.MASTER_LOCALE); String macroName = xmlContent.getStringValue( m_cms, formatterValue.getPath() + "/" + CmsFormatterBeanParser.N_MACRO_NAME, CmsLocaleManager.MASTER_LOCALE); m_formatterReferences.put(macroName, file.getLink(m_cms).getStructureId()); // depends on control dependency: [for], data = [none] } } } }
public class class_name { public void setResourceRecords(java.util.Collection<ResourceRecord> resourceRecords) { if (resourceRecords == null) { this.resourceRecords = null; return; } this.resourceRecords = new com.amazonaws.internal.SdkInternalList<ResourceRecord>(resourceRecords); } }
public class class_name { public void setResourceRecords(java.util.Collection<ResourceRecord> resourceRecords) { if (resourceRecords == null) { this.resourceRecords = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.resourceRecords = new com.amazonaws.internal.SdkInternalList<ResourceRecord>(resourceRecords); } }
public class class_name { public GroupConversionType<GetterType<T>> getOrCreateConvertGroup() { List<Node> nodeList = childNode.get("convert-group"); if (nodeList != null && nodeList.size() > 0) { return new GroupConversionTypeImpl<GetterType<T>>(this, "convert-group", childNode, nodeList.get(0)); } return createConvertGroup(); } }
public class class_name { public GroupConversionType<GetterType<T>> getOrCreateConvertGroup() { List<Node> nodeList = childNode.get("convert-group"); if (nodeList != null && nodeList.size() > 0) { return new GroupConversionTypeImpl<GetterType<T>>(this, "convert-group", childNode, nodeList.get(0)); // depends on control dependency: [if], data = [none] } return createConvertGroup(); } }
public class class_name { protected boolean isReplaceAccepted(final String dataUri) { try { final byte[] bytes = dataUri.getBytes(CharEncoding.UTF_8); final boolean exceedLimit = bytes.length >= SIZE_LIMIT; LOG.debug("dataUri size: {}KB, limit exceeded: {}", bytes.length / 1024, exceedLimit); return !exceedLimit; } catch (final UnsupportedEncodingException e) { throw new WroRuntimeException("Should never happen", e); } } }
public class class_name { protected boolean isReplaceAccepted(final String dataUri) { try { final byte[] bytes = dataUri.getBytes(CharEncoding.UTF_8); final boolean exceedLimit = bytes.length >= SIZE_LIMIT; LOG.debug("dataUri size: {}KB, limit exceeded: {}", bytes.length / 1024, exceedLimit); // depends on control dependency: [try], data = [none] return !exceedLimit; // depends on control dependency: [try], data = [none] } catch (final UnsupportedEncodingException e) { throw new WroRuntimeException("Should never happen", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void addChildNodesByPage(NodeData parent, List<NodeData> childs, int fromOrderNum) { boolean inTransaction = cache.isTransactionActive(); try { if (!inTransaction) { cache.beginTransaction(); } cache.setLocal(true); CacheNodesByPageId cacheId = new CacheNodesByPageId(getOwnerId(), parent.getIdentifier()); Map<Integer, Set<String>> pages = (Map<Integer, Set<String>>)cache.get(cacheId); if (pages == null) { pages = new HashMap<Integer, Set<String>>(); } Set<String> set = new HashSet<String>(); for (NodeData child : childs) { putNode(child, ModifyChildOption.NOT_MODIFY); set.add(child.getIdentifier()); } pages.put(fromOrderNum, set); cache.put(cacheId, pages); } finally { cache.setLocal(false); if (!inTransaction) { dedicatedTxCommit(); } } } }
public class class_name { public void addChildNodesByPage(NodeData parent, List<NodeData> childs, int fromOrderNum) { boolean inTransaction = cache.isTransactionActive(); try { if (!inTransaction) { cache.beginTransaction(); // depends on control dependency: [if], data = [none] } cache.setLocal(true); // depends on control dependency: [try], data = [none] CacheNodesByPageId cacheId = new CacheNodesByPageId(getOwnerId(), parent.getIdentifier()); Map<Integer, Set<String>> pages = (Map<Integer, Set<String>>)cache.get(cacheId); if (pages == null) { pages = new HashMap<Integer, Set<String>>(); // depends on control dependency: [if], data = [none] } Set<String> set = new HashSet<String>(); for (NodeData child : childs) { putNode(child, ModifyChildOption.NOT_MODIFY); // depends on control dependency: [for], data = [child] set.add(child.getIdentifier()); // depends on control dependency: [for], data = [child] } pages.put(fromOrderNum, set); // depends on control dependency: [try], data = [none] cache.put(cacheId, pages); // depends on control dependency: [try], data = [none] } finally { cache.setLocal(false); if (!inTransaction) { dedicatedTxCommit(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static Predicate<WebDriver> ajaxCallsCompleted() { return new Predicate<WebDriver>() { @Override public boolean apply(WebDriver driver) { if (isVaadinActive(driver)) { return true; } else { ensureBrowserRenderingTookPlace(); return false; } } }; } }
public class class_name { public static Predicate<WebDriver> ajaxCallsCompleted() { return new Predicate<WebDriver>() { @Override public boolean apply(WebDriver driver) { if (isVaadinActive(driver)) { return true; // depends on control dependency: [if], data = [none] } else { ensureBrowserRenderingTookPlace(); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } } }; } }
public class class_name { public int insertSys(PersistentStore store, Result ins) { RowSetNavigator nav = ins.getNavigator(); int count = 0; while (nav.hasNext()) { insertSys(store, nav.getNext()); count++; } return count; } }
public class class_name { public int insertSys(PersistentStore store, Result ins) { RowSetNavigator nav = ins.getNavigator(); int count = 0; while (nav.hasNext()) { insertSys(store, nav.getNext()); // depends on control dependency: [while], data = [none] count++; // depends on control dependency: [while], data = [none] } return count; } }
public class class_name { public static <T> Iterable<T> filter(final Iterable<T> fromIterable, final Predicate<? super T> predicate) { checkNotNull(fromIterable, "iterable must be non null"); checkNotNull(predicate, "predicate must be non null"); return new SupplierIterable<T>(new Supplier<Optional<T>>() { private final Iterator<T> fromIterator = fromIterable.iterator(); public Optional<T> get() { while (fromIterator.hasNext()) { final T value = fromIterator.next(); if (predicate.test(value)) { return of(value); } } return Optional.empty(); } }); } }
public class class_name { public static <T> Iterable<T> filter(final Iterable<T> fromIterable, final Predicate<? super T> predicate) { checkNotNull(fromIterable, "iterable must be non null"); checkNotNull(predicate, "predicate must be non null"); return new SupplierIterable<T>(new Supplier<Optional<T>>() { private final Iterator<T> fromIterator = fromIterable.iterator(); public Optional<T> get() { while (fromIterator.hasNext()) { final T value = fromIterator.next(); if (predicate.test(value)) { return of(value); // depends on control dependency: [if], data = [none] } } return Optional.empty(); } }); } }
public class class_name { public static Vector3D getSteepestVector(final Vector3D normal, final double epsilon) { if (Math.abs(normal.getX()) < epsilon && Math.abs(normal.getY()) < epsilon) { return new Vector3D(0, 0, 0); } Vector3D slope; if (Math.abs(normal.getX()) < epsilon) { slope = new Vector3D(0, 1, -normal.getY() / normal.getZ()); } else if (Math.abs(normal.getY()) < epsilon) { slope = new Vector3D(1, 0, -normal.getX() / normal.getZ()); } else { slope = new Vector3D(normal.getX() / normal.getY(), 1, -1 / normal.getZ() * (normal.getX() * normal.getX() / normal.getY() + normal.getY())); } //We want the vector to be low-oriented. if (slope.getZ() > epsilon) { slope = new Vector3D(-slope.getX(), -slope.getY(), -slope.getZ()); } //We normalize it return slope.normalize(); } }
public class class_name { public static Vector3D getSteepestVector(final Vector3D normal, final double epsilon) { if (Math.abs(normal.getX()) < epsilon && Math.abs(normal.getY()) < epsilon) { return new Vector3D(0, 0, 0); // depends on control dependency: [if], data = [none] } Vector3D slope; if (Math.abs(normal.getX()) < epsilon) { slope = new Vector3D(0, 1, -normal.getY() / normal.getZ()); // depends on control dependency: [if], data = [none] } else if (Math.abs(normal.getY()) < epsilon) { slope = new Vector3D(1, 0, -normal.getX() / normal.getZ()); // depends on control dependency: [if], data = [none] } else { slope = new Vector3D(normal.getX() / normal.getY(), 1, -1 / normal.getZ() * (normal.getX() * normal.getX() / normal.getY() + normal.getY())); // depends on control dependency: [if], data = [none] } //We want the vector to be low-oriented. if (slope.getZ() > epsilon) { slope = new Vector3D(-slope.getX(), -slope.getY(), -slope.getZ()); // depends on control dependency: [if], data = [none] } //We normalize it return slope.normalize(); } }
public class class_name { protected void configureEndpointAccessToDenyUndefined(final HttpSecurity http, final ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry requests) { val endpoints = casProperties.getMonitor().getEndpoints().getEndpoint().keySet(); val endpointDefaults = casProperties.getMonitor().getEndpoints().getDefaultEndpointProperties(); pathMappedEndpoints.forEach(endpoint -> { val rootPath = endpoint.getRootPath(); if (endpoints.contains(rootPath)) { LOGGER.trace("Endpoint security is defined for endpoint [{}]", rootPath); } else { val defaultAccessRules = endpointDefaults.getAccess(); LOGGER.trace("Endpoint security is NOT defined for endpoint [{}]. Using default security rules [{}]", rootPath, endpointDefaults); val endpointRequest = EndpointRequest.to(rootPath).excludingLinks(); defaultAccessRules.forEach(Unchecked.consumer(access -> configureEndpointAccess(http, requests, access, endpointDefaults, endpointRequest))); } }); } }
public class class_name { protected void configureEndpointAccessToDenyUndefined(final HttpSecurity http, final ExpressionUrlAuthorizationConfigurer<HttpSecurity>.ExpressionInterceptUrlRegistry requests) { val endpoints = casProperties.getMonitor().getEndpoints().getEndpoint().keySet(); val endpointDefaults = casProperties.getMonitor().getEndpoints().getDefaultEndpointProperties(); pathMappedEndpoints.forEach(endpoint -> { val rootPath = endpoint.getRootPath(); if (endpoints.contains(rootPath)) { LOGGER.trace("Endpoint security is defined for endpoint [{}]", rootPath); } else { val defaultAccessRules = endpointDefaults.getAccess(); LOGGER.trace("Endpoint security is NOT defined for endpoint [{}]. Using default security rules [{}]", rootPath, endpointDefaults); val endpointRequest = EndpointRequest.to(rootPath).excludingLinks(); defaultAccessRules.forEach(Unchecked.consumer(access -> configureEndpointAccess(http, requests, access, endpointDefaults, endpointRequest))); } }); // depends on control dependency: [if], data = [none] } }
public class class_name { public SingleFieldBuilder<MType, BType, IType> setMessage( MType message) { if (message == null) { throw new NullPointerException(); } this.message = message; if (builder != null) { builder.dispose(); builder = null; } onChanged(); return this; } }
public class class_name { public SingleFieldBuilder<MType, BType, IType> setMessage( MType message) { if (message == null) { throw new NullPointerException(); } this.message = message; if (builder != null) { builder.dispose(); // depends on control dependency: [if], data = [none] builder = null; // depends on control dependency: [if], data = [none] } onChanged(); return this; } }
public class class_name { public void marshall(ResetCacheRequest resetCacheRequest, ProtocolMarshaller protocolMarshaller) { if (resetCacheRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(resetCacheRequest.getGatewayARN(), GATEWAYARN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ResetCacheRequest resetCacheRequest, ProtocolMarshaller protocolMarshaller) { if (resetCacheRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(resetCacheRequest.getGatewayARN(), GATEWAYARN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Violation checkSuperInstantiation( Set<String> containerTypeParameters, AnnotationInfo annotation, Type type) { Violation info = threadSafeInstantiation(containerTypeParameters, annotation, type); if (info.isPresent()) { return info; } return Streams.zip( type.asElement().getTypeParameters().stream(), type.getTypeArguments().stream(), (typaram, argument) -> { if (containerOfSubtyping(containerTypeParameters, annotation, typaram, argument)) { return Violation.of( String.format( "'%s' is not a container of '%s'", annotation.typeName(), typaram)); } return Violation.absent(); }) .filter(Violation::isPresent) .findFirst() .orElse(Violation.absent()); } }
public class class_name { public Violation checkSuperInstantiation( Set<String> containerTypeParameters, AnnotationInfo annotation, Type type) { Violation info = threadSafeInstantiation(containerTypeParameters, annotation, type); if (info.isPresent()) { return info; // depends on control dependency: [if], data = [none] } return Streams.zip( type.asElement().getTypeParameters().stream(), type.getTypeArguments().stream(), (typaram, argument) -> { if (containerOfSubtyping(containerTypeParameters, annotation, typaram, argument)) { return Violation.of( String.format( "'%s' is not a container of '%s'", annotation.typeName(), typaram)); } return Violation.absent(); }) .filter(Violation::isPresent) .findFirst() .orElse(Violation.absent()); } }
public class class_name { @Override public CommerceShippingMethod remove(Serializable primaryKey) throws NoSuchShippingMethodException { Session session = null; try { session = openSession(); CommerceShippingMethod commerceShippingMethod = (CommerceShippingMethod)session.get(CommerceShippingMethodImpl.class, primaryKey); if (commerceShippingMethod == null) { if (_log.isDebugEnabled()) { _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey); } throw new NoSuchShippingMethodException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey); } return remove(commerceShippingMethod); } catch (NoSuchShippingMethodException nsee) { throw nsee; } catch (Exception e) { throw processException(e); } finally { closeSession(session); } } }
public class class_name { @Override public CommerceShippingMethod remove(Serializable primaryKey) throws NoSuchShippingMethodException { Session session = null; try { session = openSession(); CommerceShippingMethod commerceShippingMethod = (CommerceShippingMethod)session.get(CommerceShippingMethodImpl.class, primaryKey); if (commerceShippingMethod == null) { if (_log.isDebugEnabled()) { _log.debug(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey); // depends on control dependency: [if], data = [none] } throw new NoSuchShippingMethodException(_NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey); } return remove(commerceShippingMethod); } catch (NoSuchShippingMethodException nsee) { throw nsee; } catch (Exception e) { throw processException(e); } finally { closeSession(session); } } }
public class class_name { public void setImportSite(String importSite) { checkFrozen(); if (importSite != null) { importSite = importSite.trim(); } m_site = importSite; m_hasImportSite = true; } }
public class class_name { public void setImportSite(String importSite) { checkFrozen(); if (importSite != null) { importSite = importSite.trim(); // depends on control dependency: [if], data = [none] } m_site = importSite; m_hasImportSite = true; } }
public class class_name { @Override public void clear() { clearObserver.begin(); try { statusTransitioner.checkAvailable(); try { store.clear(); clearObserver.end(ClearOutcome.SUCCESS); } catch (StoreAccessException e) { resilienceStrategy.clearFailure(e); clearObserver.end(ClearOutcome.FAILURE); } } catch (Throwable e) { clearObserver.end(ClearOutcome.FAILURE); throw e; } } }
public class class_name { @Override public void clear() { clearObserver.begin(); try { statusTransitioner.checkAvailable(); // depends on control dependency: [try], data = [none] try { store.clear(); // depends on control dependency: [try], data = [none] clearObserver.end(ClearOutcome.SUCCESS); // depends on control dependency: [try], data = [none] } catch (StoreAccessException e) { resilienceStrategy.clearFailure(e); clearObserver.end(ClearOutcome.FAILURE); } // depends on control dependency: [catch], data = [none] } catch (Throwable e) { clearObserver.end(ClearOutcome.FAILURE); throw e; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void removeRecursive(ResultHierarchy hierarchy, Result child) { for(It<Result> iter = hierarchy.iterParents(child); iter.valid(); iter.advance()) { hierarchy.remove(iter.get(), child); } for(It<Result> iter = hierarchy.iterChildren(child); iter.valid(); iter.advance()) { removeRecursive(hierarchy, iter.get()); } } }
public class class_name { public static void removeRecursive(ResultHierarchy hierarchy, Result child) { for(It<Result> iter = hierarchy.iterParents(child); iter.valid(); iter.advance()) { hierarchy.remove(iter.get(), child); // depends on control dependency: [for], data = [iter] } for(It<Result> iter = hierarchy.iterChildren(child); iter.valid(); iter.advance()) { removeRecursive(hierarchy, iter.get()); // depends on control dependency: [for], data = [iter] } } }
public class class_name { public static float getTypeDifferenceWeight(Class<?> srcClass, Class<?> destClass) { if (srcClass == null) { return Float.MAX_VALUE; } if (destClass != null) { if (srcClass.isArray() && destClass.isArray()) { srcClass = srcClass.getComponentType(); destClass = destClass.getComponentType(); } if ((destClass.isPrimitive() && srcClass.equals(TypeUtils.getPrimitiveWrapper(destClass))) || (srcClass.isPrimitive() && destClass.equals(TypeUtils.getPrimitiveWrapper(srcClass)))) { return 0.1f; } } float weight = 0.0f; while (destClass != null) { if (destClass.equals(srcClass)) { if (destClass.isInterface()) { // slight penalty for interface match. // we still want an exact match to override an interface match, but // an interface match should override anything where we have to get a // superclass. weight += 0.25f; } break; } weight++; destClass = destClass.getSuperclass(); } /* * If the destination class is null, we've travelled all the way up to * an Object match. We'll penalize this by adding 1.5 to the cost. */ if (destClass == null) { weight += 1.5f; } return weight; } }
public class class_name { public static float getTypeDifferenceWeight(Class<?> srcClass, Class<?> destClass) { if (srcClass == null) { return Float.MAX_VALUE; // depends on control dependency: [if], data = [none] } if (destClass != null) { if (srcClass.isArray() && destClass.isArray()) { srcClass = srcClass.getComponentType(); // depends on control dependency: [if], data = [none] destClass = destClass.getComponentType(); // depends on control dependency: [if], data = [none] } if ((destClass.isPrimitive() && srcClass.equals(TypeUtils.getPrimitiveWrapper(destClass))) || (srcClass.isPrimitive() && destClass.equals(TypeUtils.getPrimitiveWrapper(srcClass)))) { return 0.1f; // depends on control dependency: [if], data = [none] } } float weight = 0.0f; while (destClass != null) { if (destClass.equals(srcClass)) { if (destClass.isInterface()) { // slight penalty for interface match. // we still want an exact match to override an interface match, but // an interface match should override anything where we have to get a // superclass. weight += 0.25f; // depends on control dependency: [if], data = [none] } break; } weight++; // depends on control dependency: [while], data = [none] destClass = destClass.getSuperclass(); // depends on control dependency: [while], data = [none] } /* * If the destination class is null, we've travelled all the way up to * an Object match. We'll penalize this by adding 1.5 to the cost. */ if (destClass == null) { weight += 1.5f; // depends on control dependency: [if], data = [none] } return weight; } }
public class class_name { void addTags(Message<?> message, SpanCustomizer result, MessageChannel channel) { // TODO topic etc if (channel != null) { result.tag("channel", messageChannelName(channel)); } } }
public class class_name { void addTags(Message<?> message, SpanCustomizer result, MessageChannel channel) { // TODO topic etc if (channel != null) { result.tag("channel", messageChannelName(channel)); // depends on control dependency: [if], data = [(channel] } } }
public class class_name { public static boolean handleIfNoneMatch(final String ifNoneMatch, final List<ETag> etags, boolean allowWeak) { if (ifNoneMatch == null) { return true; } List<ETag> parts = parseETagList(ifNoneMatch); for (ETag part : parts) { if (part.getTag().equals("*")) { return false; } if (part.isWeak() && !allowWeak) { continue; } for (ETag tag : etags) { if (tag != null) { if (tag.isWeak() && !allowWeak) { continue; } if (tag.getTag().equals(part.getTag())) { return false; } } } } return true; } }
public class class_name { public static boolean handleIfNoneMatch(final String ifNoneMatch, final List<ETag> etags, boolean allowWeak) { if (ifNoneMatch == null) { return true; // depends on control dependency: [if], data = [none] } List<ETag> parts = parseETagList(ifNoneMatch); for (ETag part : parts) { if (part.getTag().equals("*")) { return false; // depends on control dependency: [if], data = [none] } if (part.isWeak() && !allowWeak) { continue; } for (ETag tag : etags) { if (tag != null) { if (tag.isWeak() && !allowWeak) { continue; } if (tag.getTag().equals(part.getTag())) { return false; // depends on control dependency: [if], data = [none] } } } } return true; } }
public class class_name { public Expression boxAsSoyValueProvider() { if (soyType().equals(NullType.getInstance())) { if (delegate == NULL || delegate == NULL_BOXED) { return FieldRef.NULL_PROVIDER.accessor(); } // otherwise this expression might have side effects, evaluate it as a statement then return // the NULL_PROVIDER return toStatement().then(FieldRef.NULL_PROVIDER.accessor()); } if (delegate.isNonNullable()) { // Every SoyValue is-a SoyValueProvider, so if it is non-null return box(); } if (isBoxed()) { return new Expression( BytecodeUtils.SOY_VALUE_PROVIDER_TYPE, delegate.features().plus(Feature.NON_NULLABLE)) { @Override protected void doGen(CodeBuilder adapter) { Label end = new Label(); delegate.gen(adapter); adapter.dup(); adapter.ifNonNull(end); adapter.pop(); FieldRef.NULL_PROVIDER.accessStaticUnchecked(adapter); adapter.mark(end); } }; } return new Expression( BytecodeUtils.SOY_VALUE_PROVIDER_TYPE, delegate.features().plus(Feature.NON_NULLABLE)) { @Override protected void doGen(CodeBuilder adapter) { Label end = new Label(); delegate.gen(adapter); adapter.dup(); Label nonNull = new Label(); adapter.ifNonNull(nonNull); adapter.pop(); // pop the null value and replace with the nullprovider FieldRef.NULL_PROVIDER.accessStaticUnchecked(adapter); adapter.goTo(end); adapter.mark(nonNull); doBox(adapter, soyRuntimeType); adapter.mark(end); } }; } }
public class class_name { public Expression boxAsSoyValueProvider() { if (soyType().equals(NullType.getInstance())) { if (delegate == NULL || delegate == NULL_BOXED) { return FieldRef.NULL_PROVIDER.accessor(); // depends on control dependency: [if], data = [none] } // otherwise this expression might have side effects, evaluate it as a statement then return // the NULL_PROVIDER return toStatement().then(FieldRef.NULL_PROVIDER.accessor()); // depends on control dependency: [if], data = [none] } if (delegate.isNonNullable()) { // Every SoyValue is-a SoyValueProvider, so if it is non-null return box(); // depends on control dependency: [if], data = [none] } if (isBoxed()) { return new Expression( BytecodeUtils.SOY_VALUE_PROVIDER_TYPE, delegate.features().plus(Feature.NON_NULLABLE)) { @Override protected void doGen(CodeBuilder adapter) { Label end = new Label(); delegate.gen(adapter); adapter.dup(); adapter.ifNonNull(end); adapter.pop(); FieldRef.NULL_PROVIDER.accessStaticUnchecked(adapter); adapter.mark(end); } }; // depends on control dependency: [if], data = [none] } return new Expression( BytecodeUtils.SOY_VALUE_PROVIDER_TYPE, delegate.features().plus(Feature.NON_NULLABLE)) { @Override protected void doGen(CodeBuilder adapter) { Label end = new Label(); delegate.gen(adapter); adapter.dup(); Label nonNull = new Label(); adapter.ifNonNull(nonNull); adapter.pop(); // pop the null value and replace with the nullprovider FieldRef.NULL_PROVIDER.accessStaticUnchecked(adapter); adapter.goTo(end); adapter.mark(nonNull); doBox(adapter, soyRuntimeType); adapter.mark(end); } }; } }
public class class_name { private void checkNameVisibility(Scope scope, Node name) { if (!name.isName()) { return; } Var var = scope.getVar(name.getString()); if (var == null) { return; } Visibility v = checkPrivateNameConvention( AccessControlUtils.getEffectiveNameVisibility( name, var, defaultVisibilityForFiles), name); switch (v) { case PACKAGE: if (!isPackageAccessAllowed(var, name)) { compiler.report( JSError.make( name, BAD_PACKAGE_PROPERTY_ACCESS, name.getString(), var.getSourceFile().getName())); } break; case PRIVATE: if (!isPrivateAccessAllowed(var, name)) { compiler.report( JSError.make( name, BAD_PRIVATE_GLOBAL_ACCESS, name.getString(), var.getSourceFile().getName())); } break; default: // Nothing to do for PUBLIC and PROTECTED // (which is irrelevant for names). break; } } }
public class class_name { private void checkNameVisibility(Scope scope, Node name) { if (!name.isName()) { return; // depends on control dependency: [if], data = [none] } Var var = scope.getVar(name.getString()); if (var == null) { return; // depends on control dependency: [if], data = [none] } Visibility v = checkPrivateNameConvention( AccessControlUtils.getEffectiveNameVisibility( name, var, defaultVisibilityForFiles), name); switch (v) { case PACKAGE: if (!isPackageAccessAllowed(var, name)) { compiler.report( JSError.make( name, BAD_PACKAGE_PROPERTY_ACCESS, name.getString(), var.getSourceFile().getName())); // depends on control dependency: [if], data = [none] } break; case PRIVATE: if (!isPrivateAccessAllowed(var, name)) { compiler.report( JSError.make( name, BAD_PRIVATE_GLOBAL_ACCESS, name.getString(), var.getSourceFile().getName())); // depends on control dependency: [if], data = [none] } break; default: // Nothing to do for PUBLIC and PROTECTED // (which is irrelevant for names). break; } } }
public class class_name { public void setFromCornersProperties(Point3d p1, Point3d p2) { if (p1.getX()<p2.getX()) { this.minxProperty = p1.xProperty; this.maxxProperty = p2.xProperty; } else { this.minxProperty = p2.xProperty; this.maxxProperty = p1.xProperty; } if (p1.getY()<p2.getY()) { this.minyProperty = p1.yProperty; this.maxyProperty = p2.yProperty; } else { this.minyProperty = p2.yProperty; this.maxyProperty = p1.yProperty; } if (p1.getZ()<p2.getZ()) { this.minzProperty = p1.zProperty; this.maxzProperty = p2.zProperty; } else { this.minzProperty = p2.zProperty; this.maxzProperty = p1.zProperty; } } }
public class class_name { public void setFromCornersProperties(Point3d p1, Point3d p2) { if (p1.getX()<p2.getX()) { this.minxProperty = p1.xProperty; // depends on control dependency: [if], data = [none] this.maxxProperty = p2.xProperty; // depends on control dependency: [if], data = [none] } else { this.minxProperty = p2.xProperty; // depends on control dependency: [if], data = [none] this.maxxProperty = p1.xProperty; // depends on control dependency: [if], data = [none] } if (p1.getY()<p2.getY()) { this.minyProperty = p1.yProperty; // depends on control dependency: [if], data = [none] this.maxyProperty = p2.yProperty; // depends on control dependency: [if], data = [none] } else { this.minyProperty = p2.yProperty; // depends on control dependency: [if], data = [none] this.maxyProperty = p1.yProperty; // depends on control dependency: [if], data = [none] } if (p1.getZ()<p2.getZ()) { this.minzProperty = p1.zProperty; // depends on control dependency: [if], data = [none] this.maxzProperty = p2.zProperty; // depends on control dependency: [if], data = [none] } else { this.minzProperty = p2.zProperty; // depends on control dependency: [if], data = [none] this.maxzProperty = p1.zProperty; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void copyResources( File sourceDirectory, File targetDirectory ) throws MojoExecutionException { if ( !sourceDirectory.exists() ) { getLogger().info( "Directory does not exist " + sourceDirectory.getAbsolutePath() ); } else { if ( !sourceDirectory.isDirectory() ) { getLogger().debug( "Not a directory: " + sourceDirectory.getAbsolutePath() ); } else { getLogger().debug( "Copying resources from " + sourceDirectory.getAbsolutePath() ); // this may needs to be parametrized somehow String excludes = concat( DirectoryScanner.DEFAULTEXCLUDES, ", " ); copyDirectoryStructure( sourceDirectory, targetDirectory, "**", excludes ); } } } }
public class class_name { public void copyResources( File sourceDirectory, File targetDirectory ) throws MojoExecutionException { if ( !sourceDirectory.exists() ) { getLogger().info( "Directory does not exist " + sourceDirectory.getAbsolutePath() ); } else { if ( !sourceDirectory.isDirectory() ) { getLogger().debug( "Not a directory: " + sourceDirectory.getAbsolutePath() ); // depends on control dependency: [if], data = [none] } else { getLogger().debug( "Copying resources from " + sourceDirectory.getAbsolutePath() ); // depends on control dependency: [if], data = [none] // this may needs to be parametrized somehow String excludes = concat( DirectoryScanner.DEFAULTEXCLUDES, ", " ); copyDirectoryStructure( sourceDirectory, targetDirectory, "**", excludes ); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public Observable<ServiceResponse<Page<RouteFilterRuleInner>>> listByRouteFilterWithServiceResponseAsync(final String resourceGroupName, final String routeFilterName) { return listByRouteFilterSinglePageAsync(resourceGroupName, routeFilterName) .concatMap(new Func1<ServiceResponse<Page<RouteFilterRuleInner>>, Observable<ServiceResponse<Page<RouteFilterRuleInner>>>>() { @Override public Observable<ServiceResponse<Page<RouteFilterRuleInner>>> call(ServiceResponse<Page<RouteFilterRuleInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); } return Observable.just(page).concatWith(listByRouteFilterNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public Observable<ServiceResponse<Page<RouteFilterRuleInner>>> listByRouteFilterWithServiceResponseAsync(final String resourceGroupName, final String routeFilterName) { return listByRouteFilterSinglePageAsync(resourceGroupName, routeFilterName) .concatMap(new Func1<ServiceResponse<Page<RouteFilterRuleInner>>, Observable<ServiceResponse<Page<RouteFilterRuleInner>>>>() { @Override public Observable<ServiceResponse<Page<RouteFilterRuleInner>>> call(ServiceResponse<Page<RouteFilterRuleInner>> page) { String nextPageLink = page.body().nextPageLink(); if (nextPageLink == null) { return Observable.just(page); // depends on control dependency: [if], data = [none] } return Observable.just(page).concatWith(listByRouteFilterNextWithServiceResponseAsync(nextPageLink)); } }); } }
public class class_name { public static int getEndPos(JCTree tree, EndPosTable endPosTable) { if (tree == null) return Position.NOPOS; if (endPosTable == null) { // fall back on limited info in the tree return endPos(tree); } int mapPos = endPosTable.getEndPos(tree); if (mapPos != Position.NOPOS) return mapPos; switch(tree.getTag()) { case BITOR_ASG: case BITXOR_ASG: case BITAND_ASG: case SL_ASG: case SR_ASG: case USR_ASG: case PLUS_ASG: case MINUS_ASG: case MUL_ASG: case DIV_ASG: case MOD_ASG: return getEndPos(((JCAssignOp) tree).rhs, endPosTable); case OR: case AND: case BITOR: case BITXOR: case BITAND: case EQ: case NE: case LT: case GT: case LE: case GE: case SL: case SR: case USR: case PLUS: case MINUS: case MUL: case DIV: case MOD: return getEndPos(((JCBinary) tree).rhs, endPosTable); case CASE: return getEndPos(((JCCase) tree).stats.last(), endPosTable); case CATCH: return getEndPos(((JCCatch) tree).body, endPosTable); case CONDEXPR: return getEndPos(((JCConditional) tree).falsepart, endPosTable); case FORLOOP: return getEndPos(((JCForLoop) tree).body, endPosTable); case FOREACHLOOP: return getEndPos(((JCEnhancedForLoop) tree).body, endPosTable); case IF: { JCIf node = (JCIf)tree; if (node.elsepart == null) { return getEndPos(node.thenpart, endPosTable); } else { return getEndPos(node.elsepart, endPosTable); } } case LABELLED: return getEndPos(((JCLabeledStatement) tree).body, endPosTable); case MODIFIERS: return getEndPos(((JCModifiers) tree).annotations.last(), endPosTable); case SYNCHRONIZED: return getEndPos(((JCSynchronized) tree).body, endPosTable); case TOPLEVEL: return getEndPos(((JCCompilationUnit) tree).defs.last(), endPosTable); case TRY: { JCTry node = (JCTry)tree; if (node.finalizer != null) { return getEndPos(node.finalizer, endPosTable); } else if (!node.catchers.isEmpty()) { return getEndPos(node.catchers.last(), endPosTable); } else { return getEndPos(node.body, endPosTable); } } case WILDCARD: return getEndPos(((JCWildcard) tree).inner, 
endPosTable); case TYPECAST: return getEndPos(((JCTypeCast) tree).expr, endPosTable); case TYPETEST: return getEndPos(((JCInstanceOf) tree).clazz, endPosTable); case POS: case NEG: case NOT: case COMPL: case PREINC: case PREDEC: return getEndPos(((JCUnary) tree).arg, endPosTable); case WHILELOOP: return getEndPos(((JCWhileLoop) tree).body, endPosTable); case ANNOTATED_TYPE: return getEndPos(((JCAnnotatedType) tree).underlyingType, endPosTable); case ERRONEOUS: { JCErroneous node = (JCErroneous)tree; if (node.errs != null && node.errs.nonEmpty()) return getEndPos(node.errs.last(), endPosTable); } } return Position.NOPOS; } }
public class class_name { public static int getEndPos(JCTree tree, EndPosTable endPosTable) { if (tree == null) return Position.NOPOS; if (endPosTable == null) { // fall back on limited info in the tree return endPos(tree); // depends on control dependency: [if], data = [none] } int mapPos = endPosTable.getEndPos(tree); if (mapPos != Position.NOPOS) return mapPos; switch(tree.getTag()) { case BITOR_ASG: case BITXOR_ASG: case BITAND_ASG: case SL_ASG: case SR_ASG: case USR_ASG: case PLUS_ASG: case MINUS_ASG: case MUL_ASG: case DIV_ASG: case MOD_ASG: return getEndPos(((JCAssignOp) tree).rhs, endPosTable); case OR: case AND: case BITOR: case BITXOR: case BITAND: case EQ: case NE: case LT: case GT: case LE: case GE: case SL: case SR: case USR: case PLUS: case MINUS: case MUL: case DIV: case MOD: return getEndPos(((JCBinary) tree).rhs, endPosTable); case CASE: return getEndPos(((JCCase) tree).stats.last(), endPosTable); case CATCH: return getEndPos(((JCCatch) tree).body, endPosTable); case CONDEXPR: return getEndPos(((JCConditional) tree).falsepart, endPosTable); case FORLOOP: return getEndPos(((JCForLoop) tree).body, endPosTable); case FOREACHLOOP: return getEndPos(((JCEnhancedForLoop) tree).body, endPosTable); case IF: { JCIf node = (JCIf)tree; if (node.elsepart == null) { return getEndPos(node.thenpart, endPosTable); // depends on control dependency: [if], data = [none] } else { return getEndPos(node.elsepart, endPosTable); // depends on control dependency: [if], data = [(node.elsepart] } } case LABELLED: return getEndPos(((JCLabeledStatement) tree).body, endPosTable); case MODIFIERS: return getEndPos(((JCModifiers) tree).annotations.last(), endPosTable); case SYNCHRONIZED: return getEndPos(((JCSynchronized) tree).body, endPosTable); case TOPLEVEL: return getEndPos(((JCCompilationUnit) tree).defs.last(), endPosTable); case TRY: { JCTry node = (JCTry)tree; if (node.finalizer != null) { return getEndPos(node.finalizer, endPosTable); } else if (!node.catchers.isEmpty()) 
{ return getEndPos(node.catchers.last(), endPosTable); } else { return getEndPos(node.body, endPosTable); } } case WILDCARD: return getEndPos(((JCWildcard) tree).inner, endPosTable); case TYPECAST: return getEndPos(((JCTypeCast) tree).expr, endPosTable); case TYPETEST: return getEndPos(((JCInstanceOf) tree).clazz, endPosTable); case POS: case NEG: case NOT: case COMPL: case PREINC: case PREDEC: return getEndPos(((JCUnary) tree).arg, endPosTable); case WHILELOOP: return getEndPos(((JCWhileLoop) tree).body, endPosTable); case ANNOTATED_TYPE: return getEndPos(((JCAnnotatedType) tree).underlyingType, endPosTable); case ERRONEOUS: { JCErroneous node = (JCErroneous)tree; if (node.errs != null && node.errs.nonEmpty()) return getEndPos(node.errs.last(), endPosTable); } } return Position.NOPOS; } }
public class class_name { public Result getResult(boolean percentage) { float p = A_cap_B_size / (float) B_size; float r = A_cap_B_size / (float) A_size; if (percentage) { p *= 100; r *= 100; } float oov_r = Float.NaN; if (OOV > 0) { oov_r = OOV_R / (float) OOV; if (percentage) oov_r *= 100; } float iv_r = Float.NaN; if (IV > 0) { iv_r = IV_R / (float) IV; if (percentage) iv_r *= 100; } return new Result(p, r, 2 * p * r / (p + r), oov_r, iv_r); } }
public class class_name { public Result getResult(boolean percentage) { float p = A_cap_B_size / (float) B_size; float r = A_cap_B_size / (float) A_size; if (percentage) { p *= 100; // depends on control dependency: [if], data = [none] r *= 100; // depends on control dependency: [if], data = [none] } float oov_r = Float.NaN; if (OOV > 0) { oov_r = OOV_R / (float) OOV; // depends on control dependency: [if], data = [none] if (percentage) oov_r *= 100; } float iv_r = Float.NaN; if (IV > 0) { iv_r = IV_R / (float) IV; // depends on control dependency: [if], data = [none] if (percentage) iv_r *= 100; } return new Result(p, r, 2 * p * r / (p + r), oov_r, iv_r); } }
public class class_name { private StandardUnit getMetricUnit(final String metricName) { String result = null; if (metricName.equals(Constants.CPU_UTILIZATION)) { result = Constants.UNIT_PERCENT; } else if (metricName.equals(Constants.DISK_READ_BYTES)) { result = Constants.UNIT_BYTES; } else if (metricName.equals(Constants.DISK_READ_OPS)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.DISK_WRITE_BYTES)) { result = Constants.UNIT_BYTES; } else if (metricName.equals(Constants.DISK_WRITE_OPS)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.NETWORK_IN)) { result = Constants.UNIT_BYTES; } else if (metricName.equals(Constants.NETWORK_OUT)) { result = Constants.UNIT_BYTES; } else if (metricName.equals(Constants.NETWORK_PACKETS_IN)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.NETWORK_PACKETS_OUT)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.STATUS_CHECK_FAILED)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.STATUS_CHECK_FAILED_INSTANCE)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.STATUS_CHECK_FAILED_SYSTEM)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.CPU_CREDIT_USAGE)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.ESTIMATED_CHARGES)) { result = Constants.UNIT_COUNT; } else if (metricName.equals(Constants.CPU_CREDIT_BALANCE)) { result = Constants.UNIT_COUNT; } return StandardUnit.fromValue(result); } }
public class class_name { private StandardUnit getMetricUnit(final String metricName) { String result = null; if (metricName.equals(Constants.CPU_UTILIZATION)) { result = Constants.UNIT_PERCENT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.DISK_READ_BYTES)) { result = Constants.UNIT_BYTES; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.DISK_READ_OPS)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.DISK_WRITE_BYTES)) { result = Constants.UNIT_BYTES; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.DISK_WRITE_OPS)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.NETWORK_IN)) { result = Constants.UNIT_BYTES; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.NETWORK_OUT)) { result = Constants.UNIT_BYTES; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.NETWORK_PACKETS_IN)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.NETWORK_PACKETS_OUT)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.STATUS_CHECK_FAILED)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.STATUS_CHECK_FAILED_INSTANCE)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.STATUS_CHECK_FAILED_SYSTEM)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.CPU_CREDIT_USAGE)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = 
[none] } else if (metricName.equals(Constants.ESTIMATED_CHARGES)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } else if (metricName.equals(Constants.CPU_CREDIT_BALANCE)) { result = Constants.UNIT_COUNT; // depends on control dependency: [if], data = [none] } return StandardUnit.fromValue(result); } }
public class class_name { static void runTransformedLearner(boolean withCache) { // setup SULs and counters StateLocalInputSUL<Integer, Character> target = SUL; ResetCounterStateLocalInputSUL<Integer, Character> resetCounter = new ResetCounterStateLocalInputSUL<>("Resets", target); SymbolCounterStateLocalInputSUL<Integer, Character> symbolCounter = new SymbolCounterStateLocalInputSUL<>("Symbols", resetCounter); // construct a (regular) simulator membership query oracle MealyMembershipOracle<Integer, OutputAndLocalInputs<Integer, Character>> mqOracle = new StateLocalInputSULOracle<>(symbolCounter); // construct storage for EquivalenceOracle chain, because we want to use the potential cache as well List<MealyEquivalenceOracle<Integer, OutputAndLocalInputs<Integer, Character>>> eqOracles = new ArrayList<>(2); if (withCache) { MealyCacheOracle<Integer, OutputAndLocalInputs<Integer, Character>> mqCache = MealyCaches.createStateLocalInputTreeCache(INPUTS, mqOracle); eqOracles.add(mqCache.createCacheConsistencyTest()); mqOracle = mqCache; } // construct L* instance ExtensibleLStarMealy<Integer, OutputAndLocalInputs<Integer, Character>> lstar = new ExtensibleLStarMealyBuilder<Integer, OutputAndLocalInputs<Integer, Character>>().withAlphabet(INPUTS) .withOracle(mqOracle) .withCexHandler( ObservationTableCEXHandlers.RIVEST_SCHAPIRE) .create(); // here, we simply fallback to an equivalence check for the transformed automaton model eqOracles.add(new MealySimulatorEQOracle<>(TRANSFORMED_TARGET)); // construct single EQ oracle MealyEquivalenceOracle<Integer, OutputAndLocalInputs<Integer, Character>> eqOracle = new MealyEQOracleChain<>(eqOracles); // construct the experiment MealyExperiment<Integer, OutputAndLocalInputs<Integer, Character>> experiment = new MealyExperiment<>(lstar, eqOracle, INPUTS); // run experiment experiment.run(); // report results System.out.println("Transformed Hypothesis" + (withCache ? 
", with cache" : "")); System.out.println("-------------------------------------------------------"); System.out.println(resetCounter.getStatisticalData().getSummary()); System.out.println(symbolCounter.getStatisticalData().getSummary()); System.out.println("-------------------------------------------------------"); } }
public class class_name { static void runTransformedLearner(boolean withCache) { // setup SULs and counters StateLocalInputSUL<Integer, Character> target = SUL; ResetCounterStateLocalInputSUL<Integer, Character> resetCounter = new ResetCounterStateLocalInputSUL<>("Resets", target); SymbolCounterStateLocalInputSUL<Integer, Character> symbolCounter = new SymbolCounterStateLocalInputSUL<>("Symbols", resetCounter); // construct a (regular) simulator membership query oracle MealyMembershipOracle<Integer, OutputAndLocalInputs<Integer, Character>> mqOracle = new StateLocalInputSULOracle<>(symbolCounter); // construct storage for EquivalenceOracle chain, because we want to use the potential cache as well List<MealyEquivalenceOracle<Integer, OutputAndLocalInputs<Integer, Character>>> eqOracles = new ArrayList<>(2); if (withCache) { MealyCacheOracle<Integer, OutputAndLocalInputs<Integer, Character>> mqCache = MealyCaches.createStateLocalInputTreeCache(INPUTS, mqOracle); eqOracles.add(mqCache.createCacheConsistencyTest()); // depends on control dependency: [if], data = [none] mqOracle = mqCache; // depends on control dependency: [if], data = [none] } // construct L* instance ExtensibleLStarMealy<Integer, OutputAndLocalInputs<Integer, Character>> lstar = new ExtensibleLStarMealyBuilder<Integer, OutputAndLocalInputs<Integer, Character>>().withAlphabet(INPUTS) .withOracle(mqOracle) .withCexHandler( ObservationTableCEXHandlers.RIVEST_SCHAPIRE) .create(); // here, we simply fallback to an equivalence check for the transformed automaton model eqOracles.add(new MealySimulatorEQOracle<>(TRANSFORMED_TARGET)); // construct single EQ oracle MealyEquivalenceOracle<Integer, OutputAndLocalInputs<Integer, Character>> eqOracle = new MealyEQOracleChain<>(eqOracles); // construct the experiment MealyExperiment<Integer, OutputAndLocalInputs<Integer, Character>> experiment = new MealyExperiment<>(lstar, eqOracle, INPUTS); // run experiment experiment.run(); // report results 
System.out.println("Transformed Hypothesis" + (withCache ? ", with cache" : "")); System.out.println("-------------------------------------------------------"); System.out.println(resetCounter.getStatisticalData().getSummary()); System.out.println(symbolCounter.getStatisticalData().getSummary()); System.out.println("-------------------------------------------------------"); } }
public class class_name { public Children add(String name, String colour, String line) { Children children = new Children(name, colour, line); if (this.children == null) { this.children = new ArrayList<Children>(); } this.children.add(children); return children; } }
public class class_name { public Children add(String name, String colour, String line) { Children children = new Children(name, colour, line); if (this.children == null) { this.children = new ArrayList<Children>(); // depends on control dependency: [if], data = [none] } this.children.add(children); return children; } }
public class class_name { public void marshall(Repository repository, ProtocolMarshaller protocolMarshaller) { if (repository == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(repository.getRepositoryArn(), REPOSITORYARN_BINDING); protocolMarshaller.marshall(repository.getRegistryId(), REGISTRYID_BINDING); protocolMarshaller.marshall(repository.getRepositoryName(), REPOSITORYNAME_BINDING); protocolMarshaller.marshall(repository.getRepositoryUri(), REPOSITORYURI_BINDING); protocolMarshaller.marshall(repository.getCreatedAt(), CREATEDAT_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(Repository repository, ProtocolMarshaller protocolMarshaller) { if (repository == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(repository.getRepositoryArn(), REPOSITORYARN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(repository.getRegistryId(), REGISTRYID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(repository.getRepositoryName(), REPOSITORYNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(repository.getRepositoryUri(), REPOSITORYURI_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(repository.getCreatedAt(), CREATEDAT_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public <TInput, TOutput> VortexAggregateFuture<TInput, TOutput> enqueueTasklets(final VortexAggregateFunction<TOutput> aggregateFunction, final VortexFunction<TInput, TOutput> vortexFunction, final VortexAggregatePolicy policy, final List<TInput> inputs, final Optional<FutureCallback<AggregateResult<TInput, TOutput>>> callback) { final int aggregateFunctionId = aggregateIdCounter.getAndIncrement(); aggregateFunctionRepository.put(aggregateFunctionId, aggregateFunction, policy); final List<Tasklet> tasklets = new ArrayList<>(inputs.size()); final Map<Integer, TInput> taskletIdInputMap = new HashMap<>(inputs.size()); for (final TInput input : inputs) { taskletIdInputMap.put(taskletIdCounter.getAndIncrement(), input); } final VortexAggregateFuture<TInput, TOutput> vortexAggregateFuture; if (callback.isPresent()) { vortexAggregateFuture = new VortexAggregateFuture<>(executor, taskletIdInputMap, callback.get()); } else { vortexAggregateFuture = new VortexAggregateFuture<>(executor, taskletIdInputMap, null); } for (final Map.Entry<Integer, TInput> taskletIdInputEntry : taskletIdInputMap.entrySet()) { final Tasklet tasklet = new Tasklet<>(taskletIdInputEntry.getKey(), Optional.of(aggregateFunctionId), vortexFunction, taskletIdInputEntry.getValue(), vortexAggregateFuture); tasklets.add(tasklet); pendingTasklets.addLast(tasklet); } putDelegate(tasklets, vortexAggregateFuture); return vortexAggregateFuture; } }
public class class_name { @Override public <TInput, TOutput> VortexAggregateFuture<TInput, TOutput> enqueueTasklets(final VortexAggregateFunction<TOutput> aggregateFunction, final VortexFunction<TInput, TOutput> vortexFunction, final VortexAggregatePolicy policy, final List<TInput> inputs, final Optional<FutureCallback<AggregateResult<TInput, TOutput>>> callback) { final int aggregateFunctionId = aggregateIdCounter.getAndIncrement(); aggregateFunctionRepository.put(aggregateFunctionId, aggregateFunction, policy); final List<Tasklet> tasklets = new ArrayList<>(inputs.size()); final Map<Integer, TInput> taskletIdInputMap = new HashMap<>(inputs.size()); for (final TInput input : inputs) { taskletIdInputMap.put(taskletIdCounter.getAndIncrement(), input); // depends on control dependency: [for], data = [input] } final VortexAggregateFuture<TInput, TOutput> vortexAggregateFuture; if (callback.isPresent()) { vortexAggregateFuture = new VortexAggregateFuture<>(executor, taskletIdInputMap, callback.get()); // depends on control dependency: [if], data = [none] } else { vortexAggregateFuture = new VortexAggregateFuture<>(executor, taskletIdInputMap, null); // depends on control dependency: [if], data = [none] } for (final Map.Entry<Integer, TInput> taskletIdInputEntry : taskletIdInputMap.entrySet()) { final Tasklet tasklet = new Tasklet<>(taskletIdInputEntry.getKey(), Optional.of(aggregateFunctionId), vortexFunction, taskletIdInputEntry.getValue(), vortexAggregateFuture); tasklets.add(tasklet); // depends on control dependency: [for], data = [none] pendingTasklets.addLast(tasklet); // depends on control dependency: [for], data = [none] } putDelegate(tasklets, vortexAggregateFuture); return vortexAggregateFuture; } }
public class class_name { @Override public boolean statusUpdate(final String appName, final String id, final InstanceStatus newStatus, String lastDirtyTimestamp, final boolean isReplication) { if (super.statusUpdate(appName, id, newStatus, lastDirtyTimestamp, isReplication)) { replicateToPeers(Action.StatusUpdate, appName, id, null, newStatus, isReplication); return true; } return false; } }
public class class_name { @Override public boolean statusUpdate(final String appName, final String id, final InstanceStatus newStatus, String lastDirtyTimestamp, final boolean isReplication) { if (super.statusUpdate(appName, id, newStatus, lastDirtyTimestamp, isReplication)) { replicateToPeers(Action.StatusUpdate, appName, id, null, newStatus, isReplication); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { private GeneralMatrix fillVMatrix( int dim, Coordinate[] controlPoints ) { int controlPointsNum = controlPoints.length; GeneralMatrix V = new GeneralMatrix(controlPointsNum + 3, 1); for( int i = 0; i < controlPointsNum; i++ ) { V.setElement(i, 0, controlPoints[i].z); } V.setElement(V.getNumRow() - 3, 0, 0); V.setElement(V.getNumRow() - 2, 0, 0); V.setElement(V.getNumRow() - 1, 0, 0); return V; } }
public class class_name { private GeneralMatrix fillVMatrix( int dim, Coordinate[] controlPoints ) { int controlPointsNum = controlPoints.length; GeneralMatrix V = new GeneralMatrix(controlPointsNum + 3, 1); for( int i = 0; i < controlPointsNum; i++ ) { V.setElement(i, 0, controlPoints[i].z); // depends on control dependency: [for], data = [i] } V.setElement(V.getNumRow() - 3, 0, 0); V.setElement(V.getNumRow() - 2, 0, 0); V.setElement(V.getNumRow() - 1, 0, 0); return V; } }
public class class_name { @Override public TextSpan getSpan(Token token, String type) { TextSpan span = null; Map<Token, TextSpan> tokToSpan = connector.token2ItsTextSpans.get(type); if (tokToSpan != null) { span = tokToSpan.get(token); } return span; } }
public class class_name { @Override public TextSpan getSpan(Token token, String type) { TextSpan span = null; Map<Token, TextSpan> tokToSpan = connector.token2ItsTextSpans.get(type); if (tokToSpan != null) { span = tokToSpan.get(token); // depends on control dependency: [if], data = [none] } return span; } }
public class class_name { public void setResourceValueMap(Map<String, String> valueMap) { checkFrozen(); if (valueMap != null) { m_resourceValueMap = new HashMap<String, String>(valueMap); m_resourceValueMap = Collections.unmodifiableMap(m_resourceValueMap); m_resourceValue = createValueFromMap(m_resourceValueMap); } else { m_resourceValueMap = null; m_resourceValue = null; } } }
public class class_name { public void setResourceValueMap(Map<String, String> valueMap) { checkFrozen(); if (valueMap != null) { m_resourceValueMap = new HashMap<String, String>(valueMap); // depends on control dependency: [if], data = [(valueMap] m_resourceValueMap = Collections.unmodifiableMap(m_resourceValueMap); // depends on control dependency: [if], data = [none] m_resourceValue = createValueFromMap(m_resourceValueMap); // depends on control dependency: [if], data = [none] } else { m_resourceValueMap = null; // depends on control dependency: [if], data = [none] m_resourceValue = null; // depends on control dependency: [if], data = [none] } } }
public class class_name { private Color loadColor(String colorKey) { String colorCode = loadMessage(colorKey); if (colorCode == null) { return null; } try { return Color.decode(colorCode); } catch (NumberFormatException nfe) { if (logger.isWarnEnabled()) { logger.warn("Could not parse a valid Color from code [" + colorCode + "]. Ignoring and returning null."); } return null; } } }
public class class_name { private Color loadColor(String colorKey) { String colorCode = loadMessage(colorKey); if (colorCode == null) { return null; // depends on control dependency: [if], data = [none] } try { return Color.decode(colorCode); // depends on control dependency: [try], data = [none] } catch (NumberFormatException nfe) { if (logger.isWarnEnabled()) { logger.warn("Could not parse a valid Color from code [" + colorCode + "]. Ignoring and returning null."); // depends on control dependency: [if], data = [none] } return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public boolean visit(Assignment node) { Expression lhs = node.getLeftHandSide(); TypeMirror lhsType = lhs.getTypeMirror(); if (lhs instanceof ArrayAccess && !lhsType.getKind().isPrimitive()) { FunctionInvocation newAssignment = newArrayAssignment(node, (ArrayAccess) lhs, lhsType); node.replaceWith(newAssignment); newAssignment.accept(this); return false; } return true; } }
public class class_name { @Override public boolean visit(Assignment node) { Expression lhs = node.getLeftHandSide(); TypeMirror lhsType = lhs.getTypeMirror(); if (lhs instanceof ArrayAccess && !lhsType.getKind().isPrimitive()) { FunctionInvocation newAssignment = newArrayAssignment(node, (ArrayAccess) lhs, lhsType); node.replaceWith(newAssignment); // depends on control dependency: [if], data = [none] newAssignment.accept(this); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } return true; } }
public class class_name { public static void unmask(WrappedByteBuffer buf, int mask) { byte b; int remainder = buf.remaining() % 4; int remaining = buf.remaining() - remainder; int end = remaining + buf.position(); // xor a 32bit word at a time as long as possible while (buf.position() < end) { int plaintext = buf.getIntAt(buf.position()) ^ mask; buf.putInt(plaintext); } // xor the remaining 3, 2, or 1 bytes switch (remainder) { case 3: b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 24) & 0xff)); buf.put(b); b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 16) & 0xff)); buf.put(b); b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 8) & 0xff)); buf.put(b); break; case 2: b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 24) & 0xff)); buf.put(b); b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 16) & 0xff)); buf.put(b); break; case 1: b = (byte) (buf.getAt(buf.position()) ^ (mask >> 24)); buf.put(b); break; case 0: default: break; } //buf.position(start); } }
public class class_name { public static void unmask(WrappedByteBuffer buf, int mask) { byte b; int remainder = buf.remaining() % 4; int remaining = buf.remaining() - remainder; int end = remaining + buf.position(); // xor a 32bit word at a time as long as possible while (buf.position() < end) { int plaintext = buf.getIntAt(buf.position()) ^ mask; buf.putInt(plaintext); // depends on control dependency: [while], data = [none] } // xor the remaining 3, 2, or 1 bytes switch (remainder) { case 3: b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 24) & 0xff)); buf.put(b); b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 16) & 0xff)); buf.put(b); b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 8) & 0xff)); buf.put(b); break; case 2: b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 24) & 0xff)); buf.put(b); b = (byte) (buf.getAt(buf.position()) ^ ((mask >> 16) & 0xff)); buf.put(b); break; case 1: b = (byte) (buf.getAt(buf.position()) ^ (mask >> 24)); buf.put(b); break; case 0: default: break; } //buf.position(start); } }
public class class_name { @Override public void stopModule(ExtendedModuleInfo moduleInfo) { ConnectorModuleMetaDataImpl metadataImpl = (ConnectorModuleMetaDataImpl) moduleInfo.getMetaData(); String id = metadataImpl.getIdentifier(); metaDataService.fireComponentMetaDataDestroyed(metadataImpl.getComponentMetaDatas()[0]); removeServiceListeners(id); // TEMP begin CountDownLatch bcLatch = BootstrapContextImpl.latches.get(id); // TEMP end ConnectorModuleMetatype cmmt = moduleMetatype.remove(id.toUpperCase()); if (cmmt != null) try { cmmt.removeMetatype(); } catch (Throwable x) { } // TEMP begin for (boolean stopped = bcLatch == null || FrameworkState.isStopping(); !stopped;) try { stopped = bcLatch.await(1, TimeUnit.SECONDS) || FrameworkState.isStopping(); } catch (InterruptedException e) { e.getCause(); } // TEMP end } }
public class class_name { @Override public void stopModule(ExtendedModuleInfo moduleInfo) { ConnectorModuleMetaDataImpl metadataImpl = (ConnectorModuleMetaDataImpl) moduleInfo.getMetaData(); String id = metadataImpl.getIdentifier(); metaDataService.fireComponentMetaDataDestroyed(metadataImpl.getComponentMetaDatas()[0]); removeServiceListeners(id); // TEMP begin CountDownLatch bcLatch = BootstrapContextImpl.latches.get(id); // TEMP end ConnectorModuleMetatype cmmt = moduleMetatype.remove(id.toUpperCase()); if (cmmt != null) try { cmmt.removeMetatype(); // depends on control dependency: [try], data = [none] } catch (Throwable x) { } // depends on control dependency: [catch], data = [none] // TEMP begin for (boolean stopped = bcLatch == null || FrameworkState.isStopping(); !stopped;) try { stopped = bcLatch.await(1, TimeUnit.SECONDS) || FrameworkState.isStopping(); // depends on control dependency: [try], data = [none] } catch (InterruptedException e) { e.getCause(); } // depends on control dependency: [catch], data = [none] // TEMP end } }
public class class_name { static void loadFromIterable(Context cx, Scriptable scope, ScriptableObject set, Object arg1) { if ((arg1 == null) || Undefined.instance.equals(arg1)) { return; } // Call the "[Symbol.iterator]" property as a function. Object ito = ScriptRuntime.callIterator(arg1, cx, scope); if (Undefined.instance.equals(ito)) { // Per spec, ignore if the iterator returns undefined return; } // Find the "add" function of our own prototype, since it might have // been replaced. Since we're not fully constructed yet, create a dummy instance // so that we can get our own prototype. ScriptableObject dummy = ensureScriptableObject(cx.newObject(scope, set.getClassName())); final Callable add = ScriptRuntime.getPropFunctionAndThis(dummy.getPrototype(), "add", cx, scope); // Clean up the value left around by the previous function ScriptRuntime.lastStoredScriptable(cx); // Finally, run through all the iterated values and add them! try (IteratorLikeIterable it = new IteratorLikeIterable(cx, scope, ito)) { for (Object val : it) { final Object finalVal = val == Scriptable.NOT_FOUND ? Undefined.instance : val; add.call(cx, scope, set, new Object[]{finalVal}); } } } }
public class class_name { static void loadFromIterable(Context cx, Scriptable scope, ScriptableObject set, Object arg1) { if ((arg1 == null) || Undefined.instance.equals(arg1)) { return; // depends on control dependency: [if], data = [none] } // Call the "[Symbol.iterator]" property as a function. Object ito = ScriptRuntime.callIterator(arg1, cx, scope); if (Undefined.instance.equals(ito)) { // Per spec, ignore if the iterator returns undefined return; // depends on control dependency: [if], data = [none] } // Find the "add" function of our own prototype, since it might have // been replaced. Since we're not fully constructed yet, create a dummy instance // so that we can get our own prototype. ScriptableObject dummy = ensureScriptableObject(cx.newObject(scope, set.getClassName())); final Callable add = ScriptRuntime.getPropFunctionAndThis(dummy.getPrototype(), "add", cx, scope); // Clean up the value left around by the previous function ScriptRuntime.lastStoredScriptable(cx); // Finally, run through all the iterated values and add them! try (IteratorLikeIterable it = new IteratorLikeIterable(cx, scope, ito)) { for (Object val : it) { final Object finalVal = val == Scriptable.NOT_FOUND ? Undefined.instance : val; add.call(cx, scope, set, new Object[]{finalVal}); // depends on control dependency: [for], data = [none] } } } }
public class class_name { protected static void adjustPan (Line line, float pan) { try { FloatControl control = (FloatControl) line.getControl(FloatControl.Type.PAN); control.setValue(pan); } catch (Exception e) { log.debug("Cannot set pan on line: " + e); } } }
public class class_name { protected static void adjustPan (Line line, float pan) { try { FloatControl control = (FloatControl) line.getControl(FloatControl.Type.PAN); control.setValue(pan); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.debug("Cannot set pan on line: " + e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Recipes withUndeploy(String... undeploy) { if (this.undeploy == null) { setUndeploy(new com.amazonaws.internal.SdkInternalList<String>(undeploy.length)); } for (String ele : undeploy) { this.undeploy.add(ele); } return this; } }
public class class_name { public Recipes withUndeploy(String... undeploy) { if (this.undeploy == null) { setUndeploy(new com.amazonaws.internal.SdkInternalList<String>(undeploy.length)); // depends on control dependency: [if], data = [none] } for (String ele : undeploy) { this.undeploy.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { static public SimpleGraph getMoleculeGraph(IAtomContainer molecule) { SimpleGraph graph = new SimpleGraph(); for (int i = 0; i < molecule.getAtomCount(); i++) { IAtom atom = molecule.getAtom(i); graph.addVertex(atom); } for (int i = 0; i < molecule.getBondCount(); i++) { IBond bond = molecule.getBond(i); /* * int order = (int) bond.getOrder(); for (int j=0; j<order; j++) { * graph.addEdge(bond.getAtoms()[0], bond.getAtoms()[1]); } */ graph.addEdge(bond.getBegin(), bond.getEnd()); } return graph; } }
public class class_name { static public SimpleGraph getMoleculeGraph(IAtomContainer molecule) { SimpleGraph graph = new SimpleGraph(); for (int i = 0; i < molecule.getAtomCount(); i++) { IAtom atom = molecule.getAtom(i); graph.addVertex(atom); // depends on control dependency: [for], data = [none] } for (int i = 0; i < molecule.getBondCount(); i++) { IBond bond = molecule.getBond(i); /* * int order = (int) bond.getOrder(); for (int j=0; j<order; j++) { * graph.addEdge(bond.getAtoms()[0], bond.getAtoms()[1]); } */ graph.addEdge(bond.getBegin(), bond.getEnd()); // depends on control dependency: [for], data = [none] } return graph; } }
public class class_name { protected FieldDescriptor[] buildFieldsForSelect(ClassDescriptor cld) { DescriptorRepository repository = cld.getRepository(); Set fields = new ListOrderedSet(); // keep the order of the fields // add Standard Fields // MBAIRD: if the object being queried on has multiple classes mapped to the table, // then we will get all the fields that are a unique set across all those classes so if we need to // we can materialize an extent FieldDescriptor fds[] = repository.getFieldDescriptorsForMultiMappedTable(cld); for (int i = 0; i < fds.length; i++) { fields.add(fds[i]); } // add inherited Fields. This is important when querying for a class having a super-reference fds = cld.getFieldDescriptor(true); for (int i = 0; i < fds.length; i++) { fields.add(fds[i]); } // add Fields of joined subclasses Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true); for (int c = 0; c < multiJoinedClasses.length; c++) { ClassDescriptor subCld = repository.getDescriptorFor(multiJoinedClasses[c]); fds = subCld.getFieldDescriptions(); for (int i = 0; i < fds.length; i++) { fields.add(fds[i]); } } FieldDescriptor[] result = new FieldDescriptor[fields.size()]; fields.toArray(result); return result; } }
public class class_name { protected FieldDescriptor[] buildFieldsForSelect(ClassDescriptor cld) { DescriptorRepository repository = cld.getRepository(); Set fields = new ListOrderedSet(); // keep the order of the fields // add Standard Fields // MBAIRD: if the object being queried on has multiple classes mapped to the table, // then we will get all the fields that are a unique set across all those classes so if we need to // we can materialize an extent FieldDescriptor fds[] = repository.getFieldDescriptorsForMultiMappedTable(cld); for (int i = 0; i < fds.length; i++) { fields.add(fds[i]); // depends on control dependency: [for], data = [i] } // add inherited Fields. This is important when querying for a class having a super-reference fds = cld.getFieldDescriptor(true); for (int i = 0; i < fds.length; i++) { fields.add(fds[i]); // depends on control dependency: [for], data = [i] } // add Fields of joined subclasses Class[] multiJoinedClasses = repository.getSubClassesMultipleJoinedTables(cld, true); for (int c = 0; c < multiJoinedClasses.length; c++) { ClassDescriptor subCld = repository.getDescriptorFor(multiJoinedClasses[c]); fds = subCld.getFieldDescriptions(); // depends on control dependency: [for], data = [none] for (int i = 0; i < fds.length; i++) { fields.add(fds[i]); // depends on control dependency: [for], data = [i] } } FieldDescriptor[] result = new FieldDescriptor[fields.size()]; fields.toArray(result); return result; } }
public class class_name {
    /**
     * Enables or disables section highlighting.
     * Writes through the lazily-created property object when it exists;
     * otherwise stores the raw backing value and requests a redraw.
     *
     * @param HIGHLIGHT true to highlight sections, false to disable
     */
    public void setHighlightSections(final boolean HIGHLIGHT) {
        if (highlightSections != null) {
            highlightSections.set(HIGHLIGHT);
        } else {
            _highlightSections = HIGHLIGHT;
            fireUpdateEvent(REDRAW_EVENT);
        }
    }
}
public class class_name {
    // Setter for the "highlight sections" flag. Uses the property object when
    // it has been created; otherwise falls back to the primitive field and
    // fires a redraw event.
    // (Pre-existing dataflow annotations preserved verbatim.)
    public void setHighlightSections(final boolean HIGHLIGHT) {
        if (null == highlightSections) {
            _highlightSections = HIGHLIGHT; // depends on control dependency: [if], data = [none]
            fireUpdateEvent(REDRAW_EVENT); // depends on control dependency: [if], data = [none]
        } else {
            highlightSections.set(HIGHLIGHT); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Parses the projection (select list) of the given SQL query and registers
     * each projected column (name, alias, source column) in the column alias
     * map.
     * <p>
     * If all columns are selected ({@code isSelectAllColumns()}), the metadata
     * column list is used directly. Otherwise the text between "select " and
     * " from " is split on top-level commas (commas inside parentheses are
     * ignored, so function calls stay intact) and each entry is separated into
     * source column and optional alias.
     *
     * @param query input SQL query; may be null or blank, in which case no
     *              projection is recorded
     */
    private void parseInputQuery(String query) {
        List<String> projectedColumns = new ArrayList<>();
        if (StringUtils.isNotBlank(query)) {
            String queryLowerCase = query.toLowerCase();
            int selectIndex = queryLowerCase.indexOf("select ");
            int endIndex = queryLowerCase.indexOf(" from ");
            // BUGFIX: the original tested (indexOf("select ") + 7) >= 0, which
            // is true even when "select " is absent (it becomes 6). Require
            // both keywords, in order, before slicing.
            if (selectIndex >= 0 && endIndex >= selectIndex + 7) {
                int startIndex = selectIndex + 7;
                String columnProjection = query.substring(startIndex, endIndex);
                this.setInputColumnProjection(columnProjection);
                // parse the select list, splitting on commas outside parentheses
                StringBuilder sb = new StringBuilder();
                int bracketCount = 0;
                for (int i = 0; i < columnProjection.length(); i++) {
                    char c = columnProjection.charAt(i);
                    if (c == '(') {
                        bracketCount++;
                    }
                    if (c == ')') {
                        bracketCount--;
                    }
                    if (bracketCount != 0 || c != ',') {
                        sb.append(c);
                    } else {
                        projectedColumns.add(sb.toString());
                        sb = new StringBuilder();
                    }
                }
                projectedColumns.add(sb.toString());
            }
        }
        if (this.isSelectAllColumns()) {
            // All metadata columns are projected as-is (name == alias == source).
            List<String> columnList = this.getMetadataColumnList();
            for (String columnName : columnList) {
                ColumnAttributes col = new ColumnAttributes();
                col.setColumnName(columnName);
                col.setAliasName(columnName);
                col.setSourceColumnName(columnName);
                this.addToColumnAliasMap(col);
            }
        } else {
            for (String projectedColumn : projectedColumns) {
                String column = projectedColumn.trim();
                String alias = null;
                String sourceColumn = column;
                int spaceOccurences = StringUtils.countMatches(column, " ");
                if (spaceOccurences > 0) {
                    // Separate column and alias: prefer an explicit " as "
                    // separator; otherwise fall back to the last space.
                    // BUGFIX: the original used the " as " index
                    // unconditionally, so a space-separated alias
                    // ("col alias") produced substring(0, -1) and threw
                    // StringIndexOutOfBoundsException.
                    int asIndex = column.toLowerCase().lastIndexOf(" as ");
                    if (asIndex >= 0) {
                        sourceColumn = column.substring(0, asIndex);
                        alias = column.substring(asIndex + 4);
                    } else {
                        // NOTE(review): a space inside an un-aliased function
                        // call (e.g. "substr(a, 1)") is still treated as an
                        // alias separator — confirm such projections cannot
                        // reach this code.
                        int lastSpaceIndex = column.lastIndexOf(' ');
                        sourceColumn = column.substring(0, lastSpaceIndex);
                        alias = column.substring(lastSpaceIndex + 1);
                    }
                }
                // extract column name if projection has table name in it
                // ("t.col" -> "col")
                String columnName = sourceColumn;
                if (sourceColumn.contains(".")) {
                    columnName = sourceColumn.substring(sourceColumn.indexOf(".") + 1);
                }
                ColumnAttributes col = new ColumnAttributes();
                col.setColumnName(columnName);
                col.setAliasName(alias);
                col.setSourceColumnName(sourceColumn);
                this.addToColumnAliasMap(col);
            }
        }
    }
}
public class class_name {
    // Parses the select list of the input SQL query and registers each
    // projected column (name, alias, source column) in the column alias map.
    // NOTE(review): startIndex = indexOf("select ") + 7 is >= 6 even when
    // "select " is absent, so the startIndex >= 0 guard never rejects a
    // malformed query — verify callers always pass a query containing
    // "select ".
    // (The "// depends on control dependency" comments are pre-existing
    // dataflow annotations and are preserved verbatim.)
    private void parseInputQuery(String query) {
        List<String> projectedColumns = new ArrayList<>();
        if (StringUtils.isNotBlank(query)) {
            String queryLowerCase = query.toLowerCase();
            int startIndex = queryLowerCase.indexOf("select ") + 7;
            int endIndex = queryLowerCase.indexOf(" from ");
            if (startIndex >= 0 && endIndex >= 0) {
                String columnProjection = query.substring(startIndex, endIndex);
                this.setInputColumnProjection(columnProjection); // depends on control dependency: [if], data = [none]
                // parse the select list
                // Splits on commas that sit outside parentheses, so commas
                // inside function calls do not break a column expression.
                StringBuffer sb = new StringBuffer();
                int bracketCount = 0;
                for (int i = 0; i < columnProjection.length(); i++) {
                    char c = columnProjection.charAt(i);
                    if (c == '(') {
                        bracketCount++; // depends on control dependency: [if], data = [none]
                    }
                    if (c == ')') {
                        bracketCount--; // depends on control dependency: [if], data = [none]
                    }
                    if (bracketCount != 0) {
                        sb.append(c); // depends on control dependency: [if], data = [none]
                    } else {
                        if (c != ',') {
                            sb.append(c); // depends on control dependency: [if], data = [(c]
                        } else {
                            projectedColumns.add(sb.toString()); // depends on control dependency: [if], data = [none]
                            sb = new StringBuffer(); // depends on control dependency: [if], data = [none]
                        }
                    }
                }
                projectedColumns.add(sb.toString()); // depends on control dependency: [if], data = [none]
            }
        }
        if (this.isSelectAllColumns()) {
            // All metadata columns are projected as-is (name == alias == source).
            List<String> columnList = this.getMetadataColumnList();
            for (String columnName : columnList) {
                ColumnAttributes col = new ColumnAttributes();
                col.setColumnName(columnName); // depends on control dependency: [for], data = [columnName]
                col.setAliasName(columnName); // depends on control dependency: [for], data = [columnName]
                col.setSourceColumnName(columnName); // depends on control dependency: [for], data = [columnName]
                this.addToColumnAliasMap(col); // depends on control dependency: [for], data = [none]
            }
        } else {
            for (String projectedColumn : projectedColumns) {
                String column = projectedColumn.trim();
                String alias = null;
                String sourceColumn = column;
                int spaceOccurences = StringUtils.countMatches(column.trim(), " ");
                if (spaceOccurences > 0) {
                    // separate column and alias if they are separated by "as"
                    // or space
                    // NOTE(review): lastIndexOf(" as ") returns -1 for a
                    // space-separated alias, making substring(0, -1) throw
                    // StringIndexOutOfBoundsException — confirm only
                    // " as "-style aliases reach this code.
                    int lastSpaceIndex = column.toLowerCase().lastIndexOf(" as ");
                    sourceColumn = column.substring(0, lastSpaceIndex); // depends on control dependency: [if], data = [none]
                    alias = column.substring(lastSpaceIndex + 4); // depends on control dependency: [if], data = [none]
                }
                // extract column name if projection has table name in it
                String columnName = sourceColumn;
                if (sourceColumn.contains(".")) {
                    columnName = sourceColumn.substring(sourceColumn.indexOf(".") + 1); // depends on control dependency: [if], data = [none]
                }
                ColumnAttributes col = new ColumnAttributes();
                col.setColumnName(columnName); // depends on control dependency: [for], data = [none]
                col.setAliasName(alias); // depends on control dependency: [for], data = [none]
                col.setSourceColumnName(sourceColumn); // depends on control dependency: [for], data = [none]
                this.addToColumnAliasMap(col); // depends on control dependency: [for], data = [none]
            }
        }
    }
}
public class class_name { public static boolean setLogBackLevel(String loggerName, String logLevel) { String logLevelUpper = (logLevel == null) ? "OFF" : logLevel.toUpperCase(); try { Package logbackPackage = Package.getPackage(LOGBACK_CLASSIC); if (logbackPackage == null) { LOG.warn("Logback is not in the classpath!"); return false; } // Use ROOT logger if given logger name is blank. if ((loggerName == null) || loggerName.trim().isEmpty()) { loggerName = (String) getFieldVaulue(LOGBACK_CLASSIC_LOGGER, "ROOT_LOGGER_NAME"); } // Obtain logger by the name Logger loggerObtained = LoggerFactory.getLogger(loggerName); if (loggerObtained == null) { // I don't know if this case occurs LOG.warn("No logger for the name: {}", loggerName); return false; } Object logLevelObj = getFieldVaulue(LOGBACK_CLASSIC_LEVEL, logLevelUpper); if (logLevelObj == null) { LOG.warn("No such log level: {}", logLevelUpper); return false; } Class<?>[] paramTypes = {logLevelObj.getClass()}; Object[] params = {logLevelObj}; Class<?> clz = Class.forName(LOGBACK_CLASSIC_LOGGER); Method method = clz.getMethod("setLevel", paramTypes); method.invoke(loggerObtained, params); LOG.info("LogBack level set to {} for the logger '{}'", logLevelUpper, loggerName); return true; } catch (NoClassDefFoundError e) { LOG.warn("Couldn't set logback level to {} for the logger '{}'", logLevelUpper, loggerName, e); return false; } catch (Exception e) { LOG.warn("Couldn't set logback level to {} for the logger '{}'", logLevelUpper, loggerName, e); return false; } } }
public class class_name {
    // Sets the Logback log level for the given logger via reflection (no
    // compile-time Logback dependency). Null/blank loggerName selects the
    // ROOT logger; null logLevel is treated as "OFF". Returns true on
    // success, false on any failure.
    // NOTE(review): toUpperCase() without a Locale is locale-sensitive
    // (Turkish-I pitfall) — level names may fail to resolve under some
    // default locales.
    // (Pre-existing dataflow annotations preserved verbatim.)
    public static boolean setLogBackLevel(String loggerName, String logLevel) {
        String logLevelUpper = (logLevel == null) ? "OFF" : logLevel.toUpperCase();
        try {
            Package logbackPackage = Package.getPackage(LOGBACK_CLASSIC);
            if (logbackPackage == null) {
                LOG.warn("Logback is not in the classpath!"); // depends on control dependency: [if], data = [none]
                return false; // depends on control dependency: [if], data = [none]
            }
            // Use ROOT logger if given logger name is blank.
            if ((loggerName == null) || loggerName.trim().isEmpty()) {
                loggerName = (String) getFieldVaulue(LOGBACK_CLASSIC_LOGGER, "ROOT_LOGGER_NAME"); // depends on control dependency: [if], data = [none]
            }
            // Obtain logger by the name
            Logger loggerObtained = LoggerFactory.getLogger(loggerName);
            if (loggerObtained == null) {
                // I don't know if this case occurs
                LOG.warn("No logger for the name: {}", loggerName);
                return false;
            }
            Object logLevelObj = getFieldVaulue(LOGBACK_CLASSIC_LEVEL, logLevelUpper);
            if (logLevelObj == null) {
                LOG.warn("No such log level: {}", logLevelUpper);
                return false;
            }
            // Invoke setLevel(Level) on the concrete Logback logger class
            // reflectively.
            Class<?>[] paramTypes = {logLevelObj.getClass()};
            Object[] params = {logLevelObj};
            Class<?> clz = Class.forName(LOGBACK_CLASSIC_LOGGER);
            Method method = clz.getMethod("setLevel", paramTypes);
            method.invoke(loggerObtained, params);
            LOG.info("LogBack level set to {} for the logger '{}'", logLevelUpper, loggerName);
            return true;
        } catch (NoClassDefFoundError e) {
            LOG.warn("Couldn't set logback level to {} for the logger '{}'", logLevelUpper, loggerName, e);
            return false;
        } catch (Exception e) {
            LOG.warn("Couldn't set logback level to {} for the logger '{}'", logLevelUpper, loggerName, e);
            return false;
        }
    }
}
public class class_name {
    /**
     * Checks whether the device described by {@code importInfo} is currently
     * exported by a running server: tries to narrow its CORBA reference to
     * each supported IDL revision in turn (5 down to 1) and, if any narrow
     * succeeds, verifies the running device's name. CORBA communication
     * exceptions (TIMEOUT, TRANSIENT, OBJECT_NOT_EXIST, COMM_FAILURE,
     * BAD_INV_ORDER) are interpreted as "device is not running" and only
     * logged.
     *
     * @param importInfo   import data holding the CORBA reference to probe
     * @param toBeImported device name expected to be running
     * @throws DevFailed if the reference belongs to an unknown server type,
     *                   on BAD_OPERATION, or if the device-name check fails
     */
    private static void checkServerRunning(final DeviceImportInfo importInfo, final String toBeImported) throws DevFailed {
        XLOGGER.entry();
        Device_5 devIDL5 = null;
        Device_4 devIDL4 = null;
        Device_3 devIDL3 = null;
        Device_2 devIDL2 = null;
        Device devIDL1 = null;
        try {
            // try IDL5
            try {
                devIDL5 = narrowIDL5(importInfo);
            } catch (final BAD_PARAM e) {
                // try IDL4
                try {
                    devIDL4 = narrowIDL4(importInfo);
                } catch (final BAD_PARAM e4) {
                    // maybe another IDL is currently running
                    // try IDL3
                    try {
                        devIDL3 = narrowIDL3(importInfo);
                    } catch (final BAD_PARAM e1) {
                        // maybe another IDL is currently running
                        // try IDL2
                        try {
                            devIDL2 = narrowIDL2(importInfo);
                        } catch (final BAD_PARAM e2) {
                            // maybe another IDL is currently running
                            // try IDL1
                            try {
                                devIDL1 = narrowIDL1(importInfo);
                            } catch (final BAD_PARAM e3) {
                                // may not occur, unknown CORBA server
                                // NOTE(review): wraps the outermost BAD_PARAM
                                // 'e' (the IDL5 failure), not 'e3' — confirm
                                // this is intentional.
                                throw DevFailedUtils.newDevFailed(e);
                            }
                        }
                    }
                }
            }
            if (devIDL5 == null && devIDL4 == null && devIDL3 == null && devIDL2 == null && devIDL1 == null) {
                LOGGER.debug("out, device is not running");
            } else {
                // A narrow succeeded: verify the running device carries the
                // expected name.
                checkDeviceName(toBeImported, devIDL5, devIDL4, devIDL3, devIDL2, devIDL1);
            }
        } catch (final org.omg.CORBA.TIMEOUT e) {
            // Receive a Timeout exception ---> It is not running !!!!
            LOGGER.debug("out on TIMEOUT");
        } catch (final BAD_OPERATION e) {
            // System.err.println("Can't pack/unpack data sent to/from database in/to Any object");
            throw DevFailedUtils.newDevFailed(e);
        } catch (final TRANSIENT e) {
            LOGGER.debug("out on TRANSIENT, device is not running");
        } catch (final OBJECT_NOT_EXIST e) {
            LOGGER.debug("out on OBJECT_NOT_EXIST, device is not running");
        } catch (final COMM_FAILURE e) {
            LOGGER.debug("out on COMM_FAILURE,, device is not running");
        } catch (final BAD_INV_ORDER e) {
            LOGGER.debug("out on BAD_INV_ORDER,, device is not running");
        }
        XLOGGER.exit();
    }
}
public class class_name {
    // Checks whether the device described by importInfo is exported by a
    // running server: narrows its CORBA reference against each IDL revision
    // (5 down to 1); if any narrow succeeds, verifies the running device's
    // name. CORBA communication exceptions are treated as "not running" and
    // only logged; BAD_OPERATION and an unknown server type raise DevFailed.
    // NOTE(review): the innermost throw wraps the outermost BAD_PARAM 'e'
    // (the IDL5 failure), not 'e3' — confirm this is intentional.
    // (Pre-existing dataflow annotations preserved verbatim.)
    private static void checkServerRunning(final DeviceImportInfo importInfo, final String toBeImported) throws DevFailed {
        XLOGGER.entry();
        Device_5 devIDL5 = null;
        Device_4 devIDL4 = null;
        Device_3 devIDL3 = null;
        Device_2 devIDL2 = null;
        Device devIDL1 = null;
        try {
            // try IDL5
            try {
                devIDL5 = narrowIDL5(importInfo); // depends on control dependency: [try], data = [none]
            } catch (final BAD_PARAM e) {
                // try IDL4
                try {
                    devIDL4 = narrowIDL4(importInfo); // depends on control dependency: [try], data = [none]
                } catch (final BAD_PARAM e4) {
                    // maybe another IDL is currently running
                    // try IDL3
                    try {
                        devIDL3 = narrowIDL3(importInfo); // depends on control dependency: [try], data = [none]
                    } catch (final BAD_PARAM e1) {
                        // maybe another IDL is currently running
                        // try IDL2
                        try {
                            devIDL2 = narrowIDL2(importInfo); // depends on control dependency: [try], data = [none]
                        } catch (final BAD_PARAM e2) {
                            // maybe another IDL is currently running
                            // try IDL1
                            try {
                                devIDL1 = narrowIDL1(importInfo); // depends on control dependency: [try], data = [none]
                            } catch (final BAD_PARAM e3) {
                                // may not occur, unknown CORBA server
                                throw DevFailedUtils.newDevFailed(e);
                            } // depends on control dependency: [catch], data = [none]
                        } // depends on control dependency: [catch], data = [none]
                    } // depends on control dependency: [catch], data = [none]
                } // depends on control dependency: [catch], data = [none]
            } // depends on control dependency: [catch], data = [none]
            if (devIDL5 == null && devIDL4 == null && devIDL3 == null && devIDL2 == null && devIDL1 == null) {
                LOGGER.debug("out, device is not running"); // depends on control dependency: [if], data = [none]
            } else {
                checkDeviceName(toBeImported, devIDL5, devIDL4, devIDL3, devIDL2, devIDL1); // depends on control dependency: [if], data = [none]
            }
        } catch (final org.omg.CORBA.TIMEOUT e) {
            // Receive a Timeout exception ---> It is not running !!!!
            LOGGER.debug("out on TIMEOUT");
        } catch (final BAD_OPERATION e) {
            // System.err.println("Can't pack/unpack data sent to/from database in/to Any object");
            throw DevFailedUtils.newDevFailed(e);
        } catch (final TRANSIENT e) {
            LOGGER.debug("out on TRANSIENT, device is not running");
        } catch (final OBJECT_NOT_EXIST e) {
            LOGGER.debug("out on OBJECT_NOT_EXIST, device is not running");
        } catch (final COMM_FAILURE e) {
            LOGGER.debug("out on COMM_FAILURE,, device is not running");
        } catch (final BAD_INV_ORDER e) {
            LOGGER.debug("out on BAD_INV_ORDER,, device is not running");
        }
        XLOGGER.exit();
    }
}