code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public Object getValueForCell(final FacesContext context, final String rowKey, final int col) { // if we have a local value, use it // note: can't check for null, as null may be the submitted value final SheetRowColIndex index = new SheetRowColIndex(rowKey, col); if (localValues.containsKey(index)) { return localValues.get(index); } setRowVar(context, rowKey); final SheetColumn column = getColumns().get(col); return column.getValueExpression("value").getValue(context.getELContext()); } }
public class class_name { public Object getValueForCell(final FacesContext context, final String rowKey, final int col) { // if we have a local value, use it // note: can't check for null, as null may be the submitted value final SheetRowColIndex index = new SheetRowColIndex(rowKey, col); if (localValues.containsKey(index)) { return localValues.get(index); // depends on control dependency: [if], data = [none] } setRowVar(context, rowKey); final SheetColumn column = getColumns().get(col); return column.getValueExpression("value").getValue(context.getELContext()); } }
public class class_name { public static String adaptToCustomPropertyName(final String propertyName) { if (propertyName == null || "".equals(propertyName)) { throw new IllegalArgumentException("propertyName cannot be null"); } String customName = propertyName.toLowerCase(); customName = CUSTOM_REGEX.matcher(customName).replaceAll(UNDERSCORE); if (!customName.startsWith(CUSTOM_PREFIX)) { customName = CUSTOM_PREFIX + customName; } return customName; } }
public class class_name { public static String adaptToCustomPropertyName(final String propertyName) { if (propertyName == null || "".equals(propertyName)) { throw new IllegalArgumentException("propertyName cannot be null"); } String customName = propertyName.toLowerCase(); customName = CUSTOM_REGEX.matcher(customName).replaceAll(UNDERSCORE); if (!customName.startsWith(CUSTOM_PREFIX)) { customName = CUSTOM_PREFIX + customName; // depends on control dependency: [if], data = [none] } return customName; } }
public class class_name { public static boolean startsWithIgnoreCase(String input, String prefix) { if (input == null || prefix == null || prefix.length() > input.length()) { return false; } else { final char[] inputCharArray = input.toCharArray(); final char[] prefixCharArray = prefix.toCharArray(); for (int i = 0; i < prefixCharArray.length; i++) { if (!equalsIgnoreCase(prefixCharArray[i], inputCharArray[i])) { return false; } } return true; } } }
public class class_name { public static boolean startsWithIgnoreCase(String input, String prefix) { if (input == null || prefix == null || prefix.length() > input.length()) { return false; // depends on control dependency: [if], data = [none] } else { final char[] inputCharArray = input.toCharArray(); final char[] prefixCharArray = prefix.toCharArray(); for (int i = 0; i < prefixCharArray.length; i++) { if (!equalsIgnoreCase(prefixCharArray[i], inputCharArray[i])) { return false; // depends on control dependency: [if], data = [none] } } return true; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void onCountryCodeSelected(String country, String dialCode) { if (passwordlessRequestCodeLayout != null) { passwordlessRequestCodeLayout.onCountryCodeSelected(country, dialCode); } } }
public class class_name { public void onCountryCodeSelected(String country, String dialCode) { if (passwordlessRequestCodeLayout != null) { passwordlessRequestCodeLayout.onCountryCodeSelected(country, dialCode); // depends on control dependency: [if], data = [none] } } }
public class class_name { private List<String> getFieldPath(String field) { String[] paths = field.split("(?<!\\\\)\\."); ImmutableList.Builder<String> fields = ImmutableList.builder(); for (String path : paths) { fields.add(path.replaceAll("\\\\\\.", ".").replaceAll("\\\\\\\\", "\\\\")); } return fields.build(); } }
public class class_name { private List<String> getFieldPath(String field) { String[] paths = field.split("(?<!\\\\)\\."); ImmutableList.Builder<String> fields = ImmutableList.builder(); for (String path : paths) { fields.add(path.replaceAll("\\\\\\.", ".").replaceAll("\\\\\\\\", "\\\\")); // depends on control dependency: [for], data = [path] } return fields.build(); } }
public class class_name { private List<String> visibleFields(PersonAuth pAuth, SPFPersona persona, String[] fields) { SQLiteDatabase db = getReadableDatabase(); String table = Contract.TABLE_VISIBILITY; String[] columns = { Contract.COLUMN_KEY, Contract.COLUMN_CIRCLE }; String inClause = getInClause(fields); String selection = Contract.COLUMN_PERSONA + " = ? AND " + Contract.COLUMN_KEY + " IN " + inClause; String[] selectionArgs = { persona.getIdentifier() }; String groupBy = null; String having = null; String orderBy = null; Cursor c = db.query(table, columns, selection, selectionArgs, groupBy, having, orderBy); List<String> fieldKeys = new ArrayList<String>(); List<String> privateFields = new ArrayList<String>(); boolean allCircles = pAuth.getCircles().contains(DefaultCircles.ALL_CIRCLE); while (c.moveToNext()) { String key = c.getString(c.getColumnIndex(Contract.COLUMN_KEY)); String circle = c.getString(c.getColumnIndex(Contract.COLUMN_CIRCLE)); if (circle.equals(DefaultCircles.PRIVATE)){ privateFields.add(circle); } else if (allCircles || pAuth.getCircles().contains(circle)) { fieldKeys.add(key); } } c.close(); fieldKeys.removeAll(privateFields); return fieldKeys; } }
public class class_name { private List<String> visibleFields(PersonAuth pAuth, SPFPersona persona, String[] fields) { SQLiteDatabase db = getReadableDatabase(); String table = Contract.TABLE_VISIBILITY; String[] columns = { Contract.COLUMN_KEY, Contract.COLUMN_CIRCLE }; String inClause = getInClause(fields); String selection = Contract.COLUMN_PERSONA + " = ? AND " + Contract.COLUMN_KEY + " IN " + inClause; String[] selectionArgs = { persona.getIdentifier() }; String groupBy = null; String having = null; String orderBy = null; Cursor c = db.query(table, columns, selection, selectionArgs, groupBy, having, orderBy); List<String> fieldKeys = new ArrayList<String>(); List<String> privateFields = new ArrayList<String>(); boolean allCircles = pAuth.getCircles().contains(DefaultCircles.ALL_CIRCLE); while (c.moveToNext()) { String key = c.getString(c.getColumnIndex(Contract.COLUMN_KEY)); String circle = c.getString(c.getColumnIndex(Contract.COLUMN_CIRCLE)); if (circle.equals(DefaultCircles.PRIVATE)){ privateFields.add(circle); // depends on control dependency: [if], data = [none] } else if (allCircles || pAuth.getCircles().contains(circle)) { fieldKeys.add(key); // depends on control dependency: [if], data = [none] } } c.close(); fieldKeys.removeAll(privateFields); return fieldKeys; } }
public class class_name { @Override public void setRollbackOnly() { ContainerTx tx = null; IllegalStateException ise = null; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly: " + this); // ----------------------------------------------------------------------- // setRollbackOnly is not allowed when the container does not have // a meaningful transaction context, which includes the tx attrs // NotSupported, Never, and Supports. // ----------------------------------------------------------------------- // First, obtain the ContainerTx object, to insure the container has a // meaningful transaction context. tx = container.getCurrentContainerTx(); // If there is not a transaction context, or it is for a Local Tran, // then the method is either NotSupported, Never, or Supports // (without an inherited global tran), so throw the exception // required by the EJB Specification. if (tx == null || !tx.isTransactionGlobal()) { ise = new IllegalStateException("setRollbackOnly can not be called " + "without a Transaction Context"); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly: " + ise); throw ise; } // Finally, even if there is a global transaction, setRollbackOnly // is not allowed for methods with the Supports tran attribute. // The tran attribute for the method may be obtained from the // method context (EJSDeployedSupport). d161124 EJSDeployedSupport methodContext = EJSContainer.getMethodContext(); // During commit processing, from a java client or timer, there may not // be a method context so, no need to check for Supports. 
d177348 if (methodContext != null) { // d161124 setRollbackOnly can not be called from supports methods EJBMethodInfoImpl methodInfo = methodContext.methodInfo; if (methodInfo.getTransactionAttribute() == TransactionAttribute.TX_SUPPORTS) { ise = new IllegalStateException("setRollbackOnly can not be called " + "from a TX SUPPORTS method"); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly: " + ise); throw ise; } // d161124 // If the method on the current bean is the beginner of the // transaction, then this needs to be recorded, so that // postInvoke can avoid throwing an exception if the bean does // not throw an exception; per the EJB Spec. d186801 if (tx.beganInThisScope()) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly called by beginner"); methodContext.ivBeginnerSetRollbackOnly = true; } } tx.setRollbackOnly(); } }
public class class_name { @Override public void setRollbackOnly() { ContainerTx tx = null; IllegalStateException ise = null; if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly: " + this); // ----------------------------------------------------------------------- // setRollbackOnly is not allowed when the container does not have // a meaningful transaction context, which includes the tx attrs // NotSupported, Never, and Supports. // ----------------------------------------------------------------------- // First, obtain the ContainerTx object, to insure the container has a // meaningful transaction context. tx = container.getCurrentContainerTx(); // If there is not a transaction context, or it is for a Local Tran, // then the method is either NotSupported, Never, or Supports // (without an inherited global tran), so throw the exception // required by the EJB Specification. if (tx == null || !tx.isTransactionGlobal()) { ise = new IllegalStateException("setRollbackOnly can not be called " + "without a Transaction Context"); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly: " + ise); throw ise; } // Finally, even if there is a global transaction, setRollbackOnly // is not allowed for methods with the Supports tran attribute. // The tran attribute for the method may be obtained from the // method context (EJSDeployedSupport). d161124 EJSDeployedSupport methodContext = EJSContainer.getMethodContext(); // During commit processing, from a java client or timer, there may not // be a method context so, no need to check for Supports. 
d177348 if (methodContext != null) { // d161124 setRollbackOnly can not be called from supports methods EJBMethodInfoImpl methodInfo = methodContext.methodInfo; if (methodInfo.getTransactionAttribute() == TransactionAttribute.TX_SUPPORTS) { ise = new IllegalStateException("setRollbackOnly can not be called " + "from a TX SUPPORTS method"); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly: " + ise); throw ise; } // d161124 // If the method on the current bean is the beginner of the // transaction, then this needs to be recorded, so that // postInvoke can avoid throwing an exception if the bean does // not throw an exception; per the EJB Spec. d186801 if (tx.beganInThisScope()) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) Tr.debug(tc, "setRollbackOnly called by beginner"); methodContext.ivBeginnerSetRollbackOnly = true; // depends on control dependency: [if], data = [none] } } tx.setRollbackOnly(); } }
public class class_name { public Integer getMaxStatements() { if (childNode.getTextValueForPatternName("max-statements") != null && !childNode.getTextValueForPatternName("max-statements").equals("null")) { return Integer.valueOf(childNode.getTextValueForPatternName("max-statements")); } return null; } }
public class class_name { public Integer getMaxStatements() { if (childNode.getTextValueForPatternName("max-statements") != null && !childNode.getTextValueForPatternName("max-statements").equals("null")) { return Integer.valueOf(childNode.getTextValueForPatternName("max-statements")); // depends on control dependency: [if], data = [(childNode.getTextValueForPatternName("max-statements")] } return null; } }
public class class_name { public StorableIndex<S> setDefaultDirection(Direction direction) { Direction[] directions = mDirections; unspecified: { for (int i=directions.length; --i>=0; ) { if (directions[i] == Direction.UNSPECIFIED) { break unspecified; } } // Completely specified direction, so nothing to alter. return this; } directions = directions.clone(); for (int i=directions.length; --i>=0; ) { if (directions[i] == Direction.UNSPECIFIED) { directions[i] = direction; } } return new StorableIndex<S>(mProperties, directions, mUnique, mClustered, false); } }
public class class_name { public StorableIndex<S> setDefaultDirection(Direction direction) { Direction[] directions = mDirections; unspecified: { for (int i=directions.length; --i>=0; ) { if (directions[i] == Direction.UNSPECIFIED) { break unspecified; } } // Completely specified direction, so nothing to alter. return this; } directions = directions.clone(); for (int i=directions.length; --i>=0; ) { if (directions[i] == Direction.UNSPECIFIED) { directions[i] = direction; // depends on control dependency: [if], data = [none] } } return new StorableIndex<S>(mProperties, directions, mUnique, mClustered, false); } }
public class class_name { @Override public boolean isTransactionActive() { if (this.getTransactionManager() != null) try { if (this.getTransactionManager().getStatus() == Status.STATUS_ACTIVE) return true; } catch (SystemException se) { if (TC.isDebugEnabled()) { Tr.debug(TC, "Returning isTransactionActive() false: " + se.getMessage()); } return false; } return false; } }
public class class_name { @Override public boolean isTransactionActive() { if (this.getTransactionManager() != null) try { if (this.getTransactionManager().getStatus() == Status.STATUS_ACTIVE) return true; } catch (SystemException se) { if (TC.isDebugEnabled()) { Tr.debug(TC, "Returning isTransactionActive() false: " + se.getMessage()); // depends on control dependency: [if], data = [none] } return false; } // depends on control dependency: [catch], data = [none] return false; } }
public class class_name { public EncryptRequest withGrantTokens(String... grantTokens) { if (this.grantTokens == null) { setGrantTokens(new com.amazonaws.internal.SdkInternalList<String>(grantTokens.length)); } for (String ele : grantTokens) { this.grantTokens.add(ele); } return this; } }
public class class_name { public EncryptRequest withGrantTokens(String... grantTokens) { if (this.grantTokens == null) { setGrantTokens(new com.amazonaws.internal.SdkInternalList<String>(grantTokens.length)); // depends on control dependency: [if], data = [none] } for (String ele : grantTokens) { this.grantTokens.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static ContainerLaunchContext getContainerLaunchContext( final List<String> commands, final Map<String, LocalResource> localResources, final byte[] securityTokenBuffer, final Map<String, String> envMap, final ApplicationId applicationId) { final ContainerLaunchContext context = Records.newRecord(ContainerLaunchContext.class); context.setLocalResources(localResources); context.setCommands(commands); if (applicationId != null) { envMap.put(YarnUtilities.REEF_YARN_APPLICATION_ID_ENV_VAR, applicationId.toString()); } for (final Map.Entry entry : envMap.entrySet()) { LOG.log(Level.FINE, "Key : {0}, Value : {1}", new Object[] {entry.getKey(), entry.getValue()}); } context.setEnvironment(envMap); if (securityTokenBuffer != null) { context.setTokens(ByteBuffer.wrap(securityTokenBuffer)); LOG.log(Level.INFO, "Added tokens to container launch context"); } return context; } }
public class class_name { public static ContainerLaunchContext getContainerLaunchContext( final List<String> commands, final Map<String, LocalResource> localResources, final byte[] securityTokenBuffer, final Map<String, String> envMap, final ApplicationId applicationId) { final ContainerLaunchContext context = Records.newRecord(ContainerLaunchContext.class); context.setLocalResources(localResources); context.setCommands(commands); if (applicationId != null) { envMap.put(YarnUtilities.REEF_YARN_APPLICATION_ID_ENV_VAR, applicationId.toString()); // depends on control dependency: [if], data = [none] } for (final Map.Entry entry : envMap.entrySet()) { LOG.log(Level.FINE, "Key : {0}, Value : {1}", new Object[] {entry.getKey(), entry.getValue()}); // depends on control dependency: [for], data = [entry] } context.setEnvironment(envMap); if (securityTokenBuffer != null) { context.setTokens(ByteBuffer.wrap(securityTokenBuffer)); // depends on control dependency: [if], data = [(securityTokenBuffer] LOG.log(Level.INFO, "Added tokens to container launch context"); // depends on control dependency: [if], data = [none] } return context; } }
public class class_name { public static void setRequestDebuggable(HttpServletRequest req, JawrConfig jawrConfig) { // make sure we have set an overrideKey // make sure the overrideKey exists in the request // lastly, make sure the keys match if (jawrConfig.getDebugOverrideKey().length() > 0 && null != req.getParameter(JawrConstant.OVERRIDE_KEY_PARAMETER_NAME) && jawrConfig .getDebugOverrideKey().equals(req.getParameter(JawrConstant.OVERRIDE_KEY_PARAMETER_NAME))) { ThreadLocalJawrContext.setDebugOverriden(true); } else { ThreadLocalJawrContext.setDebugOverriden(false); } // Inherit the debuggable property of the session if the session is a // debuggable one inheritSessionDebugProperty(req); } }
public class class_name { public static void setRequestDebuggable(HttpServletRequest req, JawrConfig jawrConfig) { // make sure we have set an overrideKey // make sure the overrideKey exists in the request // lastly, make sure the keys match if (jawrConfig.getDebugOverrideKey().length() > 0 && null != req.getParameter(JawrConstant.OVERRIDE_KEY_PARAMETER_NAME) && jawrConfig .getDebugOverrideKey().equals(req.getParameter(JawrConstant.OVERRIDE_KEY_PARAMETER_NAME))) { ThreadLocalJawrContext.setDebugOverriden(true); // depends on control dependency: [if], data = [none] } else { ThreadLocalJawrContext.setDebugOverriden(false); // depends on control dependency: [if], data = [none] } // Inherit the debuggable property of the session if the session is a // debuggable one inheritSessionDebugProperty(req); } }
public class class_name { private String findProperty( String prop ) { String result = System.getProperty( prop ); if ( result == null ) { result = mavenProject.getProperties().getProperty( prop ); } return result; } }
public class class_name { private String findProperty( String prop ) { String result = System.getProperty( prop ); if ( result == null ) { result = mavenProject.getProperties().getProperty( prop ); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public static <CL extends ClassUml> Point2D[] evalTwoLinesForAngle(SettingsGraphicUml gp, ClassRelationFull<CL> relClrel) { Point2D pointEnd2 = null; if(relClrel.getRelationship().getSharedJoint() == null) {//from this to across relClrel RectangleRelationship<ClassFull<CL>, CL> nextRelationClass = relClrel.getRelationship().getShapeRelationshipStart() == relClrel.getClassRelationship() ? relClrel.getRelationship().getShapeRelationshipEnd() : relClrel.getRelationship().getShapeRelationshipStart(); pointEnd2 = nextRelationClass.getPointJoint(); } else {//from this to interJoint pointEnd2 = new Point2D(relClrel.getRelationship().getSharedJoint().getX(), relClrel.getRelationship().getSharedJoint().getY()); } Point2D pointEnd1 = new Point2D(relClrel.getClassRelationship().getPointJoint().getX(), relClrel.getClassRelationship().getPointJoint().getY()); if(relClrel.getClassRelationship().getSideJoint() == EJointSide.BOTTOM || relClrel.getClassRelationship().getSideJoint() == EJointSide.TOP) { pointEnd1.setX(pointEnd2.getX()); } else { pointEnd1.setY(pointEnd2.getY()); } return new Point2D[]{relClrel.getClassRelationship().getPointJoint(), pointEnd1, pointEnd2}; } }
public class class_name { public static <CL extends ClassUml> Point2D[] evalTwoLinesForAngle(SettingsGraphicUml gp, ClassRelationFull<CL> relClrel) { Point2D pointEnd2 = null; if(relClrel.getRelationship().getSharedJoint() == null) {//from this to across relClrel RectangleRelationship<ClassFull<CL>, CL> nextRelationClass = relClrel.getRelationship().getShapeRelationshipStart() == relClrel.getClassRelationship() ? relClrel.getRelationship().getShapeRelationshipEnd() : relClrel.getRelationship().getShapeRelationshipStart(); pointEnd2 = nextRelationClass.getPointJoint(); // depends on control dependency: [if], data = [none] } else {//from this to interJoint pointEnd2 = new Point2D(relClrel.getRelationship().getSharedJoint().getX(), relClrel.getRelationship().getSharedJoint().getY()); // depends on control dependency: [if], data = [none] } Point2D pointEnd1 = new Point2D(relClrel.getClassRelationship().getPointJoint().getX(), relClrel.getClassRelationship().getPointJoint().getY()); if(relClrel.getClassRelationship().getSideJoint() == EJointSide.BOTTOM || relClrel.getClassRelationship().getSideJoint() == EJointSide.TOP) { pointEnd1.setX(pointEnd2.getX()); // depends on control dependency: [if], data = [none] } else { pointEnd1.setY(pointEnd2.getY()); // depends on control dependency: [if], data = [none] } return new Point2D[]{relClrel.getClassRelationship().getPointJoint(), pointEnd1, pointEnd2}; } }
public class class_name { public INDArray getArr() { if(sameDiff.arrayAlreadyExistsForVarName(getVarName())) return sameDiff.getArrForVarName(getVarName()); //initialize value if it's actually a scalar constant (zero or 1 typically...) if(getScalarValue() != null && ArrayUtil.prod(getShape()) == 1) { INDArray arr = Nd4j.valueArrayOf(getShape(), getScalarValue().doubleValue()); sameDiff.associateArrayWithVariable(arr,this); } else if(sameDiff.getShapeForVarName(getVarName()) == null) return null; else { INDArray newAlloc = getWeightInitScheme().create(sameDiff.getShapeForVarName(getVarName())); sameDiff.associateArrayWithVariable(newAlloc,this); } return sameDiff.getArrForVarName(getVarName()); } }
public class class_name { public INDArray getArr() { if(sameDiff.arrayAlreadyExistsForVarName(getVarName())) return sameDiff.getArrForVarName(getVarName()); //initialize value if it's actually a scalar constant (zero or 1 typically...) if(getScalarValue() != null && ArrayUtil.prod(getShape()) == 1) { INDArray arr = Nd4j.valueArrayOf(getShape(), getScalarValue().doubleValue()); sameDiff.associateArrayWithVariable(arr,this); // depends on control dependency: [if], data = [none] } else if(sameDiff.getShapeForVarName(getVarName()) == null) return null; else { INDArray newAlloc = getWeightInitScheme().create(sameDiff.getShapeForVarName(getVarName())); sameDiff.associateArrayWithVariable(newAlloc,this); // depends on control dependency: [if], data = [none] } return sameDiff.getArrForVarName(getVarName()); } }
public class class_name { public Object readObject() { JsonParser parser = new JsonParser(input, objsRead, getArgs()); JsonObject<String, Object> root = new JsonObject(); Object o; try { o = parser.readValue(root); if (o == JsonParser.EMPTY_OBJECT) { return new JsonObject(); } } catch (JsonIoException e) { throw e; } catch (Exception e) { throw new JsonIoException("error parsing JSON value", e); } Object graph; if (o instanceof Object[]) { root.setType(Object[].class.getName()); root.setTarget(o); root.put("@items", o); graph = convertParsedMapsToJava(root); } else { graph = o instanceof JsonObject ? convertParsedMapsToJava((JsonObject) o) : o; } // Allow a complete 'Map' return (Javascript style) if (useMaps()) { return o; } return graph; } }
public class class_name { public Object readObject() { JsonParser parser = new JsonParser(input, objsRead, getArgs()); JsonObject<String, Object> root = new JsonObject(); Object o; try { o = parser.readValue(root); // depends on control dependency: [try], data = [none] if (o == JsonParser.EMPTY_OBJECT) { return new JsonObject(); // depends on control dependency: [if], data = [none] } } catch (JsonIoException e) { throw e; } // depends on control dependency: [catch], data = [none] catch (Exception e) { throw new JsonIoException("error parsing JSON value", e); } // depends on control dependency: [catch], data = [none] Object graph; if (o instanceof Object[]) { root.setType(Object[].class.getName()); // depends on control dependency: [if], data = [none] root.setTarget(o); // depends on control dependency: [if], data = [none] root.put("@items", o); // depends on control dependency: [if], data = [none] graph = convertParsedMapsToJava(root); // depends on control dependency: [if], data = [none] } else { graph = o instanceof JsonObject ? convertParsedMapsToJava((JsonObject) o) : o; // depends on control dependency: [if], data = [none] } // Allow a complete 'Map' return (Javascript style) if (useMaps()) { return o; // depends on control dependency: [if], data = [none] } return graph; } }
public class class_name { private List<CmsResourceTypeBean> buildTypesList(List<I_CmsResourceType> types, Set<String> creatableTypes) { ArrayList<CmsResourceTypeBean> list = new ArrayList<CmsResourceTypeBean>(); if (types == null) { return list; } Map<I_CmsResourceType, I_CmsPreviewProvider> typeProviderMapping = getPreviewProviderForTypes(types); Iterator<I_CmsResourceType> it = types.iterator(); while (it.hasNext()) { I_CmsResourceType type = it.next(); try { CmsResourceTypeBean bean = createTypeBean( type, typeProviderMapping.get(type), creatableTypes.contains(type.getTypeName())); list.add(bean); } catch (Exception e) { if (type != null) { log( Messages.get().getBundle(getWorkplaceLocale()).key( Messages.ERR_BUILD_TYPE_LIST_1, type.getTypeName()), e); } } } return list; } }
public class class_name { private List<CmsResourceTypeBean> buildTypesList(List<I_CmsResourceType> types, Set<String> creatableTypes) { ArrayList<CmsResourceTypeBean> list = new ArrayList<CmsResourceTypeBean>(); if (types == null) { return list; // depends on control dependency: [if], data = [none] } Map<I_CmsResourceType, I_CmsPreviewProvider> typeProviderMapping = getPreviewProviderForTypes(types); Iterator<I_CmsResourceType> it = types.iterator(); while (it.hasNext()) { I_CmsResourceType type = it.next(); try { CmsResourceTypeBean bean = createTypeBean( type, typeProviderMapping.get(type), creatableTypes.contains(type.getTypeName())); list.add(bean); // depends on control dependency: [try], data = [none] } catch (Exception e) { if (type != null) { log( Messages.get().getBundle(getWorkplaceLocale()).key( Messages.ERR_BUILD_TYPE_LIST_1, type.getTypeName()), e); // depends on control dependency: [if], data = [none] } } // depends on control dependency: [catch], data = [none] } return list; } }
public class class_name { public Set<Long> selectTermIds(final String name) throws SQLException { Connection conn = null; PreparedStatement stmt = null; ResultSet rs = null; Set<Long> ids = Sets.newHashSetWithExpectedSize(4); try { conn = connectionSupplier.getConnection(); stmt = conn.prepareStatement(selectTermIdsSQL); stmt.setString(1, name); rs = stmt.executeQuery(); while(rs.next()) { ids.add(rs.getLong(1)); } return ids; } finally { SQLUtil.closeQuietly(conn, stmt, rs); } } }
public class class_name { public Set<Long> selectTermIds(final String name) throws SQLException { Connection conn = null; PreparedStatement stmt = null; ResultSet rs = null; Set<Long> ids = Sets.newHashSetWithExpectedSize(4); try { conn = connectionSupplier.getConnection(); stmt = conn.prepareStatement(selectTermIdsSQL); stmt.setString(1, name); rs = stmt.executeQuery(); while(rs.next()) { ids.add(rs.getLong(1)); // depends on control dependency: [while], data = [none] } return ids; } finally { SQLUtil.closeQuietly(conn, stmt, rs); } } }
public class class_name { public Map<String, String> contractorderBuild() { Map<String, String> map = new HashMap<String, String>(); map.put("appid", getAppId()); map.put("mch_id", getMchId()); map.put("contract_appid", getAppId()); map.put("contract_mchid", getMchId()); map.put("out_trade_no", getOutTradeNo()); map.put("nonce_str", getNonceStr()); map.put("body", getBody()); map.put("attach", getAttach()); map.put("notify_url", getNotifyUrl()); map.put("total_fee", getTotalFee()); map.put("spbill_create_ip", getSpbillCreateIp()); map.put("trade_type", getTradeType().name()); if (getTradeType().equals(TradeType.JSAPI)) { map.put("openid", getOpenId()); } map.put("plan_id", getPlanId()); map.put("contract_code", getContractCode()); map.put("request_serial", getRequestSerial()); map.put("contract_display_account", getContractDisplayAccount()); map.put("contract_notify_url", getContractNotifyUrl()); map.put("sign", PaymentKit.createSign(map, getPaternerKey())); return map; } }
public class class_name { public Map<String, String> contractorderBuild() { Map<String, String> map = new HashMap<String, String>(); map.put("appid", getAppId()); map.put("mch_id", getMchId()); map.put("contract_appid", getAppId()); map.put("contract_mchid", getMchId()); map.put("out_trade_no", getOutTradeNo()); map.put("nonce_str", getNonceStr()); map.put("body", getBody()); map.put("attach", getAttach()); map.put("notify_url", getNotifyUrl()); map.put("total_fee", getTotalFee()); map.put("spbill_create_ip", getSpbillCreateIp()); map.put("trade_type", getTradeType().name()); if (getTradeType().equals(TradeType.JSAPI)) { map.put("openid", getOpenId()); // depends on control dependency: [if], data = [none] } map.put("plan_id", getPlanId()); map.put("contract_code", getContractCode()); map.put("request_serial", getRequestSerial()); map.put("contract_display_account", getContractDisplayAccount()); map.put("contract_notify_url", getContractNotifyUrl()); map.put("sign", PaymentKit.createSign(map, getPaternerKey())); return map; } }
// Upgrades an older-version catalog jar in place when its recorded build version differs
// from the running VoltDB version (or when the upgrade is forced): patches the jar's build
// info, recompiles the auto-generated canonical DDL (auto-gen.ddl, root dir only) with the
// current compiler, strips all other *.sql entries from the jar, and writes the upgraded
// jar plus a compiler-output text file to voltdbroot (or the test catalog path when no
// catalog context exists). The in-memory jar's class loader is swapped in for the
// compilation and restored in the finally block. Returns the catalog's previous version
// string when an upgrade happened, null otherwise; throws IOException on any failure.
// NOTE(review): this block appears collapsed/line-wrapped by extraction — at least one
// string literal is split across physical lines. Verify against the upstream source
// before compiling; do not trust the formatting here.
public class class_name { public String upgradeCatalogAsNeeded(InMemoryJarfile outputJar) throws IOException { // getBuildInfoFromJar() performs some validation. String[] buildInfoLines = CatalogUtil.getBuildInfoFromJar(outputJar); String versionFromCatalog = buildInfoLines[0]; // Set if an upgrade happens. String upgradedFromVersion = null; // Check if it's compatible (or the upgrade is being forced). // getConfig() may return null if it's being mocked for a test. if ( VoltDB.Configuration.m_forceCatalogUpgrade || !versionFromCatalog.equals(VoltDB.instance().getVersionString())) { // Patch the buildinfo. String versionFromVoltDB = VoltDB.instance().getVersionString(); buildInfoLines[0] = versionFromVoltDB; buildInfoLines[1] = String.format("voltdb-auto-upgrade-to-%s", versionFromVoltDB); byte[] buildInfoBytes = StringUtils.join(buildInfoLines, "\n").getBytes(); outputJar.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, buildInfoBytes); // Gather DDL files for re-compilation List<VoltCompilerReader> ddlReaderList = new ArrayList<>(); Entry<String, byte[]> entry = outputJar.firstEntry(); while (entry != null) { String path = entry.getKey(); // ENG-12980: only look for auto-gen.ddl on root directory if (AUTOGEN_DDL_FILE_NAME.equalsIgnoreCase(path)) { ddlReaderList.add(new VoltCompilerJarFileReader(outputJar, path)); break; } entry = outputJar.higherEntry(entry.getKey()); } if (ddlReaderList.size() == 0) { // did not find auto generated DDL file during upgrade throw new IOException("Could not find " + AUTOGEN_DDL_FILE_NAME + " in the catalog " + "compiled by VoltDB " + versionFromCatalog); } // Use the in-memory jarfile-provided class loader so that procedure // classes can be found and copied to the new file that gets written. ClassLoader originalClassLoader = m_classLoader; // Compile and save the file to voltdbroot. Assume it's a test environment if there // is no catalog context available. 
String jarName = String.format("catalog-%s.jar", versionFromVoltDB); String textName = String.format("catalog-%s.out", versionFromVoltDB); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); final String outputJarPath = (catalogContext != null ? new File(VoltDB.instance().getVoltDBRootPath(), jarName).getPath() : VoltDB.Configuration.getPathToCatalogForTest(jarName)); // Place the compiler output in a text file in the same folder. final String outputTextPath = (catalogContext != null ? new File(VoltDB.instance().getVoltDBRootPath(), textName).getPath() : VoltDB.Configuration.getPathToCatalogForTest(textName)); try { m_classLoader = outputJar.getLoader(); consoleLog.info(String.format( "Version %s catalog will be automatically upgraded to version %s.", versionFromCatalog, versionFromVoltDB)); // Do the compilation work. boolean success = compileInternalToFile(outputJarPath, null, null, ddlReaderList, outputJar); // Sanitize the *.sql files in the jarfile so that only the autogenerated // canonical DDL file will be used for future compilations // Bomb out if we failed to generate the canonical DDL if (success) { boolean foundCanonicalDDL = false; entry = outputJar.firstEntry(); while (entry != null) { String path = entry.getKey(); if (path.toLowerCase().endsWith(".sql")) { if (!path.toLowerCase().equals(AUTOGEN_DDL_FILE_NAME)) { outputJar.remove(path); } else { foundCanonicalDDL = true; } } entry = outputJar.higherEntry(entry.getKey()); } success = foundCanonicalDDL; } if (success) { // Set up the return string. upgradedFromVersion = versionFromCatalog; } // Summarize the results to a file. // Briefly log success or failure and mention the output text file. PrintStream outputStream = new PrintStream(outputTextPath); try { if (success) { summarizeSuccess(outputStream, outputStream, outputJarPath); consoleLog.info(String.format( "The catalog was automatically upgraded from " + "version %s to %s and saved to \"%s\". 
" + "Compiler output is available in \"%s\".", versionFromCatalog, versionFromVoltDB, outputJarPath, outputTextPath)); } else { summarizeErrors(outputStream, outputStream); outputStream.close(); compilerLog.error("Catalog upgrade failed."); compilerLog.info(String.format( "Had attempted to perform an automatic version upgrade of a " + "catalog that was compiled by an older %s version of VoltDB, " + "but the automatic upgrade failed. The cluster will not be " + "able to start until the incompatibility is fixed. " + "Try re-compiling the catalog with the newer %s version " + "of the VoltDB compiler. Compiler output from the failed " + "upgrade is available in \"%s\".", versionFromCatalog, versionFromVoltDB, outputTextPath)); throw new IOException(String.format( "Catalog upgrade failed. You will need to recompile using voltdb compile.")); } } finally { outputStream.close(); } } catch (IOException ioe) { // Do nothing because this could come from the normal failure path throw ioe; } catch (Exception e) { compilerLog.error("Catalog upgrade failed with error:"); compilerLog.error(e.getMessage()); compilerLog.info(String.format( "Had attempted to perform an automatic version upgrade of a " + "catalog that was compiled by an older %s version of VoltDB, " + "but the automatic upgrade failed. The cluster will not be " + "able to start until the incompatibility is fixed. " + "Try re-compiling the catalog with the newer %s version " + "of the VoltDB compiler. Compiler output from the failed " + "upgrade is available in \"%s\".", versionFromCatalog, versionFromVoltDB, outputTextPath)); throw new IOException(String.format( "Catalog upgrade failed. You will need to recompile using voltdb compile.")); } finally { // Restore the original class loader m_classLoader = originalClassLoader; } } return upgradedFromVersion; } }
// NOTE(review): copy of the preceding upgradeCatalogAsNeeded() with machine-generated
// "// depends on control dependency: [...]" labels appended to statements. Treat the
// labels as data, not maintainer comments, and keep this copy in sync with the
// unannotated one. The block is also line-wrapped by extraction (breaks fall inside
// '//' comments and string literals) — formatting here is not authoritative.
public class class_name { public String upgradeCatalogAsNeeded(InMemoryJarfile outputJar) throws IOException { // getBuildInfoFromJar() performs some validation. String[] buildInfoLines = CatalogUtil.getBuildInfoFromJar(outputJar); String versionFromCatalog = buildInfoLines[0]; // Set if an upgrade happens. String upgradedFromVersion = null; // Check if it's compatible (or the upgrade is being forced). // getConfig() may return null if it's being mocked for a test. if ( VoltDB.Configuration.m_forceCatalogUpgrade || !versionFromCatalog.equals(VoltDB.instance().getVersionString())) { // Patch the buildinfo. String versionFromVoltDB = VoltDB.instance().getVersionString(); buildInfoLines[0] = versionFromVoltDB; buildInfoLines[1] = String.format("voltdb-auto-upgrade-to-%s", versionFromVoltDB); byte[] buildInfoBytes = StringUtils.join(buildInfoLines, "\n").getBytes(); outputJar.put(CatalogUtil.CATALOG_BUILDINFO_FILENAME, buildInfoBytes); // Gather DDL files for re-compilation List<VoltCompilerReader> ddlReaderList = new ArrayList<>(); Entry<String, byte[]> entry = outputJar.firstEntry(); while (entry != null) { String path = entry.getKey(); // ENG-12980: only look for auto-gen.ddl on root directory if (AUTOGEN_DDL_FILE_NAME.equalsIgnoreCase(path)) { ddlReaderList.add(new VoltCompilerJarFileReader(outputJar, path)); // depends on control dependency: [if], data = [none] break; } entry = outputJar.higherEntry(entry.getKey()); // depends on control dependency: [while], data = [(entry] } if (ddlReaderList.size() == 0) { // did not find auto generated DDL file during upgrade throw new IOException("Could not find " + AUTOGEN_DDL_FILE_NAME + " in the catalog " + "compiled by VoltDB " + versionFromCatalog); } // Use the in-memory jarfile-provided class loader so that procedure // classes can be found and copied to the new file that gets written. ClassLoader originalClassLoader = m_classLoader; // Compile and save the file to voltdbroot. 
Assume it's a test environment if there // is no catalog context available. String jarName = String.format("catalog-%s.jar", versionFromVoltDB); String textName = String.format("catalog-%s.out", versionFromVoltDB); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); final String outputJarPath = (catalogContext != null ? new File(VoltDB.instance().getVoltDBRootPath(), jarName).getPath() : VoltDB.Configuration.getPathToCatalogForTest(jarName)); // Place the compiler output in a text file in the same folder. final String outputTextPath = (catalogContext != null ? new File(VoltDB.instance().getVoltDBRootPath(), textName).getPath() : VoltDB.Configuration.getPathToCatalogForTest(textName)); try { m_classLoader = outputJar.getLoader(); // depends on control dependency: [try], data = [none] consoleLog.info(String.format( "Version %s catalog will be automatically upgraded to version %s.", versionFromCatalog, versionFromVoltDB)); // depends on control dependency: [try], data = [none] // Do the compilation work. 
boolean success = compileInternalToFile(outputJarPath, null, null, ddlReaderList, outputJar); // Sanitize the *.sql files in the jarfile so that only the autogenerated // canonical DDL file will be used for future compilations // Bomb out if we failed to generate the canonical DDL if (success) { boolean foundCanonicalDDL = false; entry = outputJar.firstEntry(); // depends on control dependency: [if], data = [none] while (entry != null) { String path = entry.getKey(); if (path.toLowerCase().endsWith(".sql")) { if (!path.toLowerCase().equals(AUTOGEN_DDL_FILE_NAME)) { outputJar.remove(path); // depends on control dependency: [if], data = [none] } else { foundCanonicalDDL = true; // depends on control dependency: [if], data = [none] } } entry = outputJar.higherEntry(entry.getKey()); // depends on control dependency: [while], data = [(entry] } success = foundCanonicalDDL; // depends on control dependency: [if], data = [none] } if (success) { // Set up the return string. upgradedFromVersion = versionFromCatalog; // depends on control dependency: [if], data = [none] } // Summarize the results to a file. // Briefly log success or failure and mention the output text file. PrintStream outputStream = new PrintStream(outputTextPath); try { if (success) { summarizeSuccess(outputStream, outputStream, outputJarPath); // depends on control dependency: [if], data = [none] consoleLog.info(String.format( "The catalog was automatically upgraded from " + "version %s to %s and saved to \"%s\". 
" + "Compiler output is available in \"%s\".", versionFromCatalog, versionFromVoltDB, outputJarPath, outputTextPath)); // depends on control dependency: [if], data = [none] } else { summarizeErrors(outputStream, outputStream); // depends on control dependency: [if], data = [none] outputStream.close(); // depends on control dependency: [if], data = [none] compilerLog.error("Catalog upgrade failed."); // depends on control dependency: [if], data = [none] compilerLog.info(String.format( "Had attempted to perform an automatic version upgrade of a " + "catalog that was compiled by an older %s version of VoltDB, " + "but the automatic upgrade failed. The cluster will not be " + "able to start until the incompatibility is fixed. " + "Try re-compiling the catalog with the newer %s version " + "of the VoltDB compiler. Compiler output from the failed " + "upgrade is available in \"%s\".", versionFromCatalog, versionFromVoltDB, outputTextPath)); // depends on control dependency: [if], data = [none] throw new IOException(String.format( "Catalog upgrade failed. You will need to recompile using voltdb compile.")); } } finally { outputStream.close(); } } catch (IOException ioe) { // Do nothing because this could come from the normal failure path throw ioe; } // depends on control dependency: [catch], data = [none] catch (Exception e) { compilerLog.error("Catalog upgrade failed with error:"); compilerLog.error(e.getMessage()); compilerLog.info(String.format( "Had attempted to perform an automatic version upgrade of a " + "catalog that was compiled by an older %s version of VoltDB, " + "but the automatic upgrade failed. The cluster will not be " + "able to start until the incompatibility is fixed. " + "Try re-compiling the catalog with the newer %s version " + "of the VoltDB compiler. Compiler output from the failed " + "upgrade is available in \"%s\".", versionFromCatalog, versionFromVoltDB, outputTextPath)); throw new IOException(String.format( "Catalog upgrade failed. 
You will need to recompile using voltdb compile.")); } // depends on control dependency: [catch], data = [none] finally { // Restore the original class loader m_classLoader = originalClassLoader; } } return upgradedFromVersion; } }
public class class_name {
    /**
     * Looks up {@code name} in the principal mapping table.
     *
     * @param name the incoming principal name to translate
     * @return a {@link SimplePrincipal} wrapping the mapped name, or null when
     *         no mapping table is configured or the name has no entry
     */
    public Principal mapPrincipal(String name) {
        // No mapping table configured at all — nothing to translate.
        if (principals == null) {
            return null;
        }
        final String mapped = principals.get(name);
        return (mapped == null) ? null : new SimplePrincipal(mapped);
    }
}
// NOTE(review): copy of the unannotated mapPrincipal() with a machine-generated
// "// depends on control dependency: [...]" label; treat the label as data, not a
// maintainer comment. The collapsed one-line formatting means the inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { public Principal mapPrincipal(String name) { if (principals != null) { String mapping = principals.get(name); if (mapping != null) { return new SimplePrincipal(mapping); // depends on control dependency: [if], data = [(mapping] } } return null; } }
public class class_name {
    /**
     * Converts {@code obj} to a {@link SoyValueProvider}, deferring the
     * expensive conversion when a cheap one is not available.
     *
     * @param obj the raw value to convert (may be null)
     * @return the cheap conversion when one exists; otherwise a caching
     *         provider that converts and resolves lazily on first access
     */
    private SoyValueProvider convertLazy(@Nullable final Object obj) {
        final SoyValueProvider cheap = convertCheap(obj);
        if (cheap == null) {
            // No cheap conversion — wrap the expensive path in a caching
            // provider so it runs at most once, on demand.
            return new SoyAbstractCachingValueProvider() {
                @Override
                protected SoyValue compute() {
                    return convertNonPrimitive(obj).resolve();
                }

                @Override
                public RenderResult status() {
                    return RenderResult.done();
                }
            };
        }
        return cheap;
    }
}
// NOTE(review): copy of the unannotated convertLazy() with machine-generated
// "// depends on control dependency: [...]" labels; treat the labels as data, not
// maintainer comments. The collapsed one-line formatting means each inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { private SoyValueProvider convertLazy(@Nullable final Object obj) { SoyValueProvider convertedPrimitive = convertCheap(obj); if (convertedPrimitive != null) { return convertedPrimitive; // depends on control dependency: [if], data = [none] } else { return new SoyAbstractCachingValueProvider() { @Override protected SoyValue compute() { return convertNonPrimitive(obj).resolve(); } @Override public RenderResult status() { return RenderResult.done(); } }; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the first value associated with {@code name}.
     *
     * @param name the key to look up in the delegate multimap
     * @return the first mapped value, or null when the key is absent or maps
     *         to an empty list
     */
    public String getFirst(String name) {
        final List<String> mapped = delegate.get(name);
        if (mapped == null || mapped.isEmpty()) {
            return null;
        }
        return mapped.get(0);
    }
}
// NOTE(review): copy of the unannotated getFirst() with a machine-generated
// "// depends on control dependency: [...]" label; treat the label as data, not a
// maintainer comment. The collapsed one-line formatting means the inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { public String getFirst(String name) { List<String> values = delegate.get(name); if (values != null) { if (values.size() > 0) { return values.get(0); // depends on control dependency: [if], data = [0)] } } return null; } }
public class class_name {
    /**
     * Copies every element of this collection into {@code dst}, starting at
     * {@code offset}, in iteration order.
     *
     * @param dst the destination array (must have room for all elements)
     * @param offset the index at which to write the first element
     * @return the index one past the last element written
     */
    @CanIgnoreReturnValue
    int copyIntoArray(Object[] dst, int offset) {
        int cursor = offset;
        for (E element : this) {
            dst[cursor] = element;
            cursor++;
        }
        return cursor;
    }
}
// NOTE(review): copy of the unannotated copyIntoArray() with a machine-generated
// "// depends on control dependency: [...]" label; treat the label as data, not a
// maintainer comment. The collapsed one-line formatting means the inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { @CanIgnoreReturnValue int copyIntoArray(Object[] dst, int offset) { for (E e : this) { dst[offset++] = e; // depends on control dependency: [for], data = [e] } return offset; } }
public class class_name {
    /**
     * Prepends {@code aPadding}, repeated {@code aRepeatCount} times, to
     * {@code aString}.
     *
     * @param aString the string to pad
     * @param aPadding the padding unit to repeat
     * @param aRepeatCount how many copies of the padding to prepend; zero (or
     *        a negative count) leaves the string unpadded
     * @return the padded string, or {@code aString} itself when no padding is
     *         requested
     */
    public static String padStart(final String aString, final String aPadding, final int aRepeatCount) {
        if (aRepeatCount == 0) {
            // Fast path: nothing to prepend.
            return aString;
        }
        final StringBuilder result = new StringBuilder();
        int remaining = aRepeatCount;
        while (remaining > 0) {
            result.append(aPadding);
            remaining--;
        }
        return result.append(aString).toString();
    }
}
// NOTE(review): copy of the unannotated padStart() with machine-generated
// "// depends on control dependency: [...]" labels; treat the labels as data, not
// maintainer comments. The collapsed one-line formatting means each inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { public static String padStart(final String aString, final String aPadding, final int aRepeatCount) { if (aRepeatCount != 0) { final StringBuilder buffer = new StringBuilder(); for (int index = 0; index < aRepeatCount; index++) { buffer.append(aPadding); // depends on control dependency: [for], data = [none] } return buffer.append(aString).toString(); // depends on control dependency: [if], data = [none] } return aString; } }
// Renders the "Options:" section of a usage/help message into {@code out}: first computes
// a common column width from the option names (required options are flagged with "* "),
// then appends each option followed by its description augmented with dynamic-parameter
// syntax, its default value (passwords masked as "********", empty defaults shown as
// "<empty string>"), and — for enum-typed options — the list of possible values (skipped
// when the description already contains the same "Options: [...]" list). Descriptions are
// wrapped relative to the computed indent via wrapDescription(..).
// NOTE(review): uses a raw-typed EnumSet.allOf cast on the option's enum type (unchecked).
// The block appears collapsed/line-wrapped by extraction — formatting is not authoritative.
public class class_name { public void appendAllParametersDetails(StringBuilder out, int indentCount, String indent, List<ParameterDescription> sortedParameters) { if (sortedParameters.size() > 0) { out.append(indent).append(" Options:\n"); } // Calculate prefix indent int prefixIndent = 0; for (ParameterDescription pd : sortedParameters) { WrappedParameter parameter = pd.getParameter(); String prefix = (parameter.required() ? "* " : " ") + pd.getNames(); if (prefix.length() > prefixIndent) { prefixIndent = prefix.length(); } } // Append parameters for (ParameterDescription pd : sortedParameters) { WrappedParameter parameter = pd.getParameter(); String prefix = (parameter.required() ? "* " : " ") + pd.getNames(); out.append(indent) .append(" ") .append(prefix) .append(s(prefixIndent - prefix.length())) .append(" "); final int initialLinePrefixLength = indent.length() + prefixIndent + 3; // Generate description String description = pd.getDescription(); Object def = pd.getDefault(); if (pd.isDynamicParameter()) { String syntax = "(syntax: " + parameter.names()[0] + "key" + parameter.getAssignment() + "value)"; description += (description.length() == 0 ? "" : " ") + syntax; } if (def != null && !pd.isHelp()) { String displayedDef = Strings.isStringEmpty(def.toString()) ? "<empty string>" : def.toString(); String defaultText = "(default: " + (parameter.password() ? "********" : displayedDef) + ")"; description += (description.length() == 0 ? "" : " ") + defaultText; } Class<?> type = pd.getParameterized().getType(); if (type.isEnum()) { String valueList = EnumSet.allOf((Class<? extends Enum>) type).toString(); // Prevent duplicate values list, since it is set as 'Options: [values]' if the description // of an enum field is empty in ParameterDescription#init(..) if (!description.contains("Options: " + valueList)) { String possibleValues = "(values: " + valueList + ")"; description += (description.length() == 0 ? 
"" : " ") + possibleValues; } } // Append description // The magic value 3 is the number of spaces between the name of the option and its description // in DefaultUsageFormatter#appendCommands(..) wrapDescription(out, indentCount + prefixIndent - 3, initialLinePrefixLength, description); out.append("\n"); } } }
// NOTE(review): copy of the preceding appendAllParametersDetails() with machine-generated
// "// depends on control dependency: [...]" labels appended to statements. Treat the labels
// as data, not maintainer comments, and keep this copy in sync with the unannotated one.
// The block is line-wrapped by extraction (one break falls inside a generic type token) —
// formatting here is not authoritative.
public class class_name { public void appendAllParametersDetails(StringBuilder out, int indentCount, String indent, List<ParameterDescription> sortedParameters) { if (sortedParameters.size() > 0) { out.append(indent).append(" Options:\n"); // depends on control dependency: [if], data = [none] } // Calculate prefix indent int prefixIndent = 0; for (ParameterDescription pd : sortedParameters) { WrappedParameter parameter = pd.getParameter(); String prefix = (parameter.required() ? "* " : " ") + pd.getNames(); if (prefix.length() > prefixIndent) { prefixIndent = prefix.length(); // depends on control dependency: [if], data = [none] } } // Append parameters for (ParameterDescription pd : sortedParameters) { WrappedParameter parameter = pd.getParameter(); String prefix = (parameter.required() ? "* " : " ") + pd.getNames(); out.append(indent) .append(" ") .append(prefix) .append(s(prefixIndent - prefix.length())) .append(" "); // depends on control dependency: [for], data = [none] final int initialLinePrefixLength = indent.length() + prefixIndent + 3; // Generate description String description = pd.getDescription(); Object def = pd.getDefault(); if (pd.isDynamicParameter()) { String syntax = "(syntax: " + parameter.names()[0] + "key" + parameter.getAssignment() + "value)"; description += (description.length() == 0 ? "" : " ") + syntax; // depends on control dependency: [if], data = [none] } if (def != null && !pd.isHelp()) { String displayedDef = Strings.isStringEmpty(def.toString()) ? "<empty string>" : def.toString(); String defaultText = "(default: " + (parameter.password() ? "********" : displayedDef) + ")"; description += (description.length() == 0 ? "" : " ") + defaultText; } Class<?> type = pd.getParameterized().getType(); if (type.isEnum()) { String valueList = EnumSet.allOf((Class<? 
extends Enum>) type).toString(); // Prevent duplicate values list, since it is set as 'Options: [values]' if the description // of an enum field is empty in ParameterDescription#init(..) if (!description.contains("Options: " + valueList)) { String possibleValues = "(values: " + valueList + ")"; description += (description.length() == 0 ? "" : " ") + possibleValues; } } // Append description // The magic value 3 is the number of spaces between the name of the option and its description // in DefaultUsageFormatter#appendCommands(..) wrapDescription(out, indentCount + prefixIndent - 3, initialLinePrefixLength, description); // depends on control dependency: [for], data = [none] out.append("\n"); // depends on control dependency: [for], data = [none] } } }
public class class_name {
    /**
     * Picks an element of {@code list} at a random position.
     *
     * @param list the list to pick from (may be null)
     * @return a randomly chosen element, or null when the list is null or empty
     */
    public static <T> T getRandomPosition(List<T> list) {
        final boolean nothingToPick = (list == null) || list.isEmpty();
        if (nothingToPick) {
            return null;
        }
        // getRandomInteger's bound is the last valid index.
        final int position = getRandomInteger(list.size() - 1);
        return list.get(position);
    }
}
// NOTE(review): copy of the unannotated getRandomPosition() with a machine-generated
// "// depends on control dependency: [...]" label; treat the label as data, not a
// maintainer comment. The collapsed one-line formatting means the inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { public static <T> T getRandomPosition(List<T> list) { if (list == null || list.isEmpty()) { return null; // depends on control dependency: [if], data = [none] } return list.get(getRandomInteger(list.size() - 1)); } }
public class class_name {
    /**
     * Executes a database access operation, translating any {@link SQLException}
     * into an unchecked {@code DBExecutionException}.
     *
     * @param callInfo call-site information used when logging the failure
     * @param op the database operation to run
     * @return the operation's result
     * @throws DBExecutionException wrapping any SQLException raised by {@code op}
     */
    <T> T access(CallInfo callInfo, Access<T> op) {
        try {
            return op.execute();
        } catch (SQLException e) {
            // Only log when the option is enabled; the cause is preserved
            // in the wrapping exception either way.
            final boolean shouldLog = isEnabled(DB.Option.LOG_DATABASE_EXCEPTIONS);
            if (shouldLog) {
                log.write(callInfo, e);
            }
            throw new DBExecutionException(e);
        }
    }
}
// NOTE(review): copy of the unannotated access() with machine-generated
// "// depends on control dependency: [...]" labels; treat the labels as data, not
// maintainer comments. The collapsed one-line formatting means each inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { <T> T access(CallInfo callInfo, Access<T> op) { try { return op.execute(); // depends on control dependency: [try], data = [none] } catch (SQLException e) { if (isEnabled(DB.Option.LOG_DATABASE_EXCEPTIONS)) { log.write(callInfo, e); // depends on control dependency: [if], data = [none] } throw new DBExecutionException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Walks from {@code n} up to (but not including) {@code expressionRoot},
     * testing every later sibling of each ancestor against {@code predicate}.
     *
     * @param n the starting node
     * @param expressionRoot the exclusive upper bound of the ancestor walk
     * @param predicate the condition to test each following sibling with
     * @return true as soon as any such sibling satisfies the predicate,
     *         false when none does
     */
    private static boolean checkPostExpressions(
            Node n, Node expressionRoot, Predicate<Node> predicate) {
        Node ancestor = n;
        while (ancestor != expressionRoot) {
            // Examine everything that executes after this ancestor at its level.
            Node sibling = ancestor.getNext();
            while (sibling != null) {
                if (predicate.apply(sibling)) {
                    return true;
                }
                sibling = sibling.getNext();
            }
            ancestor = ancestor.getParent();
        }
        return false;
    }
}
// NOTE(review): copy of the unannotated checkPostExpressions() with a machine-generated
// "// depends on control dependency: [...]" label; treat the label as data, not a
// maintainer comment. The collapsed one-line formatting means the inline '//' label
// comments out the remainder of the physical line — formatting is not authoritative.
public class class_name { private static boolean checkPostExpressions( Node n, Node expressionRoot, Predicate<Node> predicate) { for (Node p = n; p != expressionRoot; p = p.getParent()) { for (Node cur = p.getNext(); cur != null; cur = cur.getNext()) { if (predicate.apply(cur)) { return true; // depends on control dependency: [if], data = [none] } } } return false; } }
// MBean-driven scan for 'external' file monitors. Bails out when the holder is stopped or
// the monitor is not of type MONITOR_TYPE_EXTERNAL. Under scanLock (and only when not in
// destroy state and the monitor is ACTIVE): rescans the filesystem, case-normalizes the
// result sets, reconciles and merges previously unnotified changes, optionally filters the
// merged sets against the caller-notified sets when doFilterPaths is true (unrequested
// changes are stashed back into the unnotified sets for later calls), then invokes the
// registered monitor's onChange — the extended FileMonitor overload (with listenerFilter)
// when processing all pending events. RuntimeExceptions from the monitor are caught and
// logged as warnings; doDestroy() is re-attempted and the lock released in the finally.
// NOTE(review): the block appears collapsed/line-wrapped by extraction — formatting is
// not authoritative.
public class class_name { void externalScan(Set<File> notifiedCreated, Set<File> notifiedDeleted, Set<File> notifiedModified, boolean doFilterPaths, String listenerFilter) { // Don't perform the external scan if this monitor holder is paused if (isStopped) return; // only do anything if this is an 'external' monitor if (!!!FileMonitor.MONITOR_TYPE_EXTERNAL.equals(monitorRef.getProperty(FileMonitor.MONITOR_TYPE))) return; // Give monitoring activity on other threads a chance to catch up before requesting a scan // (This is most likely to affect unit test behaviour rather than mbean invocations, but be safe) Thread.yield(); // Multiple threads can call the FileNotificationMBean simultaneously so we need to lock scanLock.lock(); try { // Always try destroy when we obtain the lock: it will return true if this is in destroy or destroyed state // Also (after we have tried doDestroy) ensure that we are in active state if (!doDestroy() && (monitorState.get() == MonitorState.ACTIVE.ordinal())) { if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(this, tc, "File monitor scan: begin", updateMonitors); } List<File> actualCreated = new ArrayList<File>(); List<File> actualDeleted = new ArrayList<File>(); List<File> actualModified = new ArrayList<File>(); scanForUpdates(actualCreated, actualDeleted, actualModified); // use the correct case forms of the files we found in our internal scan Set<File> created = PathUtils.fixPathFiles(actualCreated); Set<File> deleted = PathUtils.fixPathFiles(actualDeleted); Set<File> modified = PathUtils.fixPathFiles(actualModified); // SPI PathUtils.fixpathFiles returns an empty collection if the file // list is empty, create an actual set so we can add to it later if needed if (created == Collections.EMPTY_SET) created = new HashSet<File>(); if (deleted == Collections.EMPTY_SET) deleted = new HashSet<File>(); if (modified == Collections.EMPTY_SET) modified = new HashSet<File>(); 
// Take the previously unnotified/unrequested changes // and resolve them against the result of the latest // filesystem scan to make sure they are still // valid resolveChangesForExternalScan(unnotifiedFileCreates, unnotifiedFileDeletes, unnotifiedFileModifies, created, deleted, modified); // Now merge the result of the current filesystem scan with // previous unnotified changes. This represents the complete // set of valid/current choices they can now notify about created.addAll(unnotifiedFileCreates); deleted.addAll(unnotifiedFileDeletes); modified.addAll(unnotifiedFileModifies); // We are going to rebuild these lists from anything left over in the next block unnotifiedFileCreates.clear(); unnotifiedFileDeletes.clear(); unnotifiedFileModifies.clear(); // If a filter was specified, all pending updates are to be processed. if (doFilterPaths) { // Now take the notified changes and compare it against all the possible // valid choices, unrequested changes are placed into the unnotified set // so they can be used by the caller on subsequent calls filterSets(created, notifiedCreated, unnotifiedFileCreates); filterSets(deleted, notifiedDeleted, unnotifiedFileDeletes); filterSets(modified, notifiedModified, unnotifiedFileModifies); } if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) { // changes were discovered: trace & call the registered file monitor if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(this, tc, "File monitor scan: end; resources changed", created.size() + " created", modified.size() + " modified", deleted.size() + " deleted"); } if (monitor != null) { try { // If we are processing all pending events, call the extended version of the FileMonitor. 
if (!doFilterPaths && monitor instanceof com.ibm.ws.kernel.filemonitor.FileMonitor) { ((com.ibm.ws.kernel.filemonitor.FileMonitor) monitor).onChange(created, modified, deleted, listenerFilter); } else { monitor.onChange(created, modified, deleted); } } catch (RuntimeException e) { // FFDC instrumentation will go here // Catch the exception so we can FFDC it // Don't increment the exception counter since this is externally triggered Tr.warning(tc, "fileMonitorException", created, modified, deleted, monitor.getClass(), e.getLocalizedMessage()); } } } else if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { // If super detailed trace is enabled, we trace the begin/end of all file scans Tr.debug(this, tc, "File monitor scan: end; no changes"); } } } catch (RuntimeException e) { // TODO: MUST CATCH exceptions here (to at least get FFDC)... ick } finally { try { doDestroy(); // always attempt destroy while we hold the lock } finally { scanLock.unlock(); } } } }
// NOTE(review): copy of the preceding externalScan() with machine-generated
// "// depends on control dependency: [...]" labels appended to statements. Treat the
// labels as data, not maintainer comments, and keep this copy in sync with the
// unannotated one. The block is line-wrapped by extraction (breaks fall inside
// expressions and '//' comments) — formatting here is not authoritative.
public class class_name { void externalScan(Set<File> notifiedCreated, Set<File> notifiedDeleted, Set<File> notifiedModified, boolean doFilterPaths, String listenerFilter) { // Don't perform the external scan if this monitor holder is paused if (isStopped) return; // only do anything if this is an 'external' monitor if (!!!FileMonitor.MONITOR_TYPE_EXTERNAL.equals(monitorRef.getProperty(FileMonitor.MONITOR_TYPE))) return; // Give monitoring activity on other threads a chance to catch up before requesting a scan // (This is most likely to affect unit test behaviour rather than mbean invocations, but be safe) Thread.yield(); // Multiple threads can call the FileNotificationMBean simultaneously so we need to lock scanLock.lock(); try { // Always try destroy when we obtain the lock: it will return true if this is in destroy or destroyed state // Also (after we have tried doDestroy) ensure that we are in active state if (!doDestroy() && (monitorState.get() == MonitorState.ACTIVE.ordinal())) { if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(this, tc, "File monitor scan: begin", updateMonitors); // depends on control dependency: [if], data = [none] } List<File> actualCreated = new ArrayList<File>(); List<File> actualDeleted = new ArrayList<File>(); List<File> actualModified = new ArrayList<File>(); scanForUpdates(actualCreated, actualDeleted, actualModified); // depends on control dependency: [if], data = [none] // use the correct case forms of the files we found in our internal scan Set<File> created = PathUtils.fixPathFiles(actualCreated); Set<File> deleted = PathUtils.fixPathFiles(actualDeleted); Set<File> modified = PathUtils.fixPathFiles(actualModified); // SPI PathUtils.fixpathFiles returns an empty collection if the file // list is empty, create an actual set so we can add to it later if needed if (created == Collections.EMPTY_SET) created = new HashSet<File>(); if (deleted == 
Collections.EMPTY_SET) deleted = new HashSet<File>(); if (modified == Collections.EMPTY_SET) modified = new HashSet<File>(); // Take the previously unnotified/unrequested changes // and resolve them against the result of the latest // filesystem scan to make sure they are still // valid resolveChangesForExternalScan(unnotifiedFileCreates, unnotifiedFileDeletes, unnotifiedFileModifies, created, deleted, modified); // depends on control dependency: [if], data = [none] // Now merge the result of the current filesystem scan with // previous unnotified changes. This represents the complete // set of valid/current choices they can now notify about created.addAll(unnotifiedFileCreates); // depends on control dependency: [if], data = [none] deleted.addAll(unnotifiedFileDeletes); // depends on control dependency: [if], data = [none] modified.addAll(unnotifiedFileModifies); // depends on control dependency: [if], data = [none] // We are going to rebuild these lists from anything left over in the next block unnotifiedFileCreates.clear(); // depends on control dependency: [if], data = [none] unnotifiedFileDeletes.clear(); // depends on control dependency: [if], data = [none] unnotifiedFileModifies.clear(); // depends on control dependency: [if], data = [none] // If a filter was specified, all pending updates are to be processed. 
if (doFilterPaths) { // Now take the notified changes and compare it against all the possible // valid choices, unrequested changes are placed into the unnotified set // so they can be used by the caller on subsequent calls filterSets(created, notifiedCreated, unnotifiedFileCreates); // depends on control dependency: [if], data = [none] filterSets(deleted, notifiedDeleted, unnotifiedFileDeletes); // depends on control dependency: [if], data = [none] filterSets(modified, notifiedModified, unnotifiedFileModifies); // depends on control dependency: [if], data = [none] } if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) { // changes were discovered: trace & call the registered file monitor if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(this, tc, "File monitor scan: end; resources changed", created.size() + " created", modified.size() + " modified", deleted.size() + " deleted"); // depends on control dependency: [if], data = [none] } if (monitor != null) { try { // If we are processing all pending events, call the extended version of the FileMonitor. 
if (!doFilterPaths && monitor instanceof com.ibm.ws.kernel.filemonitor.FileMonitor) { ((com.ibm.ws.kernel.filemonitor.FileMonitor) monitor).onChange(created, modified, deleted, listenerFilter); // depends on control dependency: [if], data = [none] } else { monitor.onChange(created, modified, deleted); // depends on control dependency: [if], data = [none] } } catch (RuntimeException e) { // FFDC instrumentation will go here // Catch the exception so we can FFDC it // Don't increment the exception counter since this is externally triggered Tr.warning(tc, "fileMonitorException", created, modified, deleted, monitor.getClass(), e.getLocalizedMessage()); } // depends on control dependency: [catch], data = [none] } } else if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { // If super detailed trace is enabled, we trace the begin/end of all file scans Tr.debug(this, tc, "File monitor scan: end; no changes"); // depends on control dependency: [if], data = [none] } } } catch (RuntimeException e) { // TODO: MUST CATCH exceptions here (to at least get FFDC)... ick } finally { // depends on control dependency: [catch], data = [none] try { doDestroy(); // always attempt destroy while we hold the lock // depends on control dependency: [try], data = [none] } finally { scanLock.unlock(); } } } }
public class class_name {
    /**
     * Draws every slice of the pie chart, rendering the currently touched
     * slice (if any) in highlight mode and all others in normal draw mode.
     */
    private void drawSlices(Canvas canvas) {
        final PieChartData data = dataProvider.getPieChartData();
        // Degrees of arc contributed by one unit of slice value.
        final float degreesPerUnit = 360f / maxSum;
        float startAngle = rotation;
        int index = 0;
        for (SliceValue slice : data.getValues()) {
            final float sweep = Math.abs(slice.getValue()) * degreesPerUnit;
            final boolean highlighted =
                isTouched() && selectedValue.getFirstIndex() == index;
            drawSlice(canvas, slice, startAngle, sweep,
                highlighted ? MODE_HIGHLIGHT : MODE_DRAW);
            startAngle += sweep;
            ++index;
        }
    }
}
public class class_name { private void drawSlices(Canvas canvas) { final PieChartData data = dataProvider.getPieChartData(); final float sliceScale = 360f / maxSum; float lastAngle = rotation; int sliceIndex = 0; for (SliceValue sliceValue : data.getValues()) { final float angle = Math.abs(sliceValue.getValue()) * sliceScale; if (isTouched() && selectedValue.getFirstIndex() == sliceIndex) { drawSlice(canvas, sliceValue, lastAngle, angle, MODE_HIGHLIGHT); // depends on control dependency: [if], data = [none] } else { drawSlice(canvas, sliceValue, lastAngle, angle, MODE_DRAW); // depends on control dependency: [if], data = [none] } lastAngle += angle; // depends on control dependency: [for], data = [none] ++sliceIndex; // depends on control dependency: [for], data = [none] } } }
public class class_name {
    /**
     * Sets the collection of detected faces. A null input clears the field;
     * otherwise the elements are copied into a fresh list so later changes to
     * the caller's collection do not affect this object.
     *
     * @param faces the faces to store, or null to clear
     */
    public void setFaces(java.util.Collection<FaceDetection> faces) {
        this.faces = (faces == null)
            ? null
            : new java.util.ArrayList<FaceDetection>(faces);
    }
}
public class class_name { public void setFaces(java.util.Collection<FaceDetection> faces) { if (faces == null) { this.faces = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.faces = new java.util.ArrayList<FaceDetection>(faces); } }
public class class_name {
    /**
     * Pre-populates insert-time defaults on the given DO instance based on its
     * {@code @Column} annotation settings: soft-delete marker, insert
     * timestamp, declared literal default, random string id and CAS version.
     * Fields that already hold a non-null value are never overwritten.
     *
     * @param t the object about to be inserted; a null argument is a no-op
     */
    public static <T> void preHandleInsert(T t) {
        if(t == null) {
            return;
        }
        List<Field> fields = DOInfoReader.getColumns(t.getClass());
        if(fields.isEmpty()) {
            return;
        }
        for(Field field : fields) {
            Column column = field.getAnnotation(Column.class);
            // Soft-delete: the annotation carries a {not-deleted, deleted} value
            // pair; seed the "not deleted" marker when the field is still null.
            if(column.softDelete() != null && column.softDelete().length == 2
                    && !column.softDelete()[0].trim().isEmpty()
                    && !column.softDelete()[1].trim().isEmpty()) {
                Object delete = DOInfoReader.getValue(field, t);
                if(delete == null) {
                    DOInfoReader.setValue(field, t, column.softDelete()[0]);
                }
            }
            // Auto-set a creation timestamp on Date-typed fields.
            if(column.setTimeWhenInsert() && Date.class.isAssignableFrom(field.getType())) {
                if(DOInfoReader.getValue(field, t) == null) {
                    DOInfoReader.setValue(field, t, new Date());
                }
            }
            // Declared literal default value from the annotation.
            if(column.insertDefault() != null && !column.insertDefault().isEmpty()) {
                if(DOInfoReader.getValue(field, t) == null) {
                    DOInfoReader.setValue(field, t, column.insertDefault());
                }
            }
            // 32-char random identifier (a UUID with the dashes stripped).
            if(column.setRandomStringWhenInsert()) {
                if(DOInfoReader.getValue(field, t) == null) {
                    DOInfoReader.setValue(field, t,
                        UUID.randomUUID().toString().replace("-", "").substring(0, 32));
                }
            }
            // Optimistic-lock (CAS) version counter starts at 1.
            if(column.casVersion()) {
                if(DOInfoReader.getValue(field, t) == null) {
                    DOInfoReader.setValue(field, t, 1);
                }
            }
        }
    }
}
public class class_name { public static <T> void preHandleInsert(T t) { if(t == null) { return; // depends on control dependency: [if], data = [none] } List<Field> fields = DOInfoReader.getColumns(t.getClass()); if(fields.isEmpty()) { return; // depends on control dependency: [if], data = [none] } for(Field field : fields) { Column column = field.getAnnotation(Column.class); if(column.softDelete() != null && column.softDelete().length == 2 && !column.softDelete()[0].trim().isEmpty() && !column.softDelete()[1].trim().isEmpty()) { Object delete = DOInfoReader.getValue(field, t); if(delete == null) { DOInfoReader.setValue(field, t, column.softDelete()[0]); // depends on control dependency: [if], data = [none] } } if(column.setTimeWhenInsert() && Date.class.isAssignableFrom(field.getType())) { if(DOInfoReader.getValue(field, t) == null) { DOInfoReader.setValue(field, t, new Date()); // depends on control dependency: [if], data = [none] } } if(column.insertDefault() != null && !column.insertDefault().isEmpty()) { if(DOInfoReader.getValue(field, t) == null) { DOInfoReader.setValue(field, t, column.insertDefault()); // depends on control dependency: [if], data = [none] } } if(column.setRandomStringWhenInsert()) { if(DOInfoReader.getValue(field, t) == null) { DOInfoReader.setValue(field, t, UUID.randomUUID().toString().replace("-", "").substring(0, 32)); // depends on control dependency: [if], data = [none] } } if(column.casVersion()) { if(DOInfoReader.getValue(field, t) == null) { DOInfoReader.setValue(field, t, 1); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public static void orderBestPatterns(ResultPoint[] patterns) { // Find distances between pattern centers float zeroOneDistance = distance(patterns[0], patterns[1]); float oneTwoDistance = distance(patterns[1], patterns[2]); float zeroTwoDistance = distance(patterns[0], patterns[2]); ResultPoint pointA; ResultPoint pointB; ResultPoint pointC; // Assume one closest to other two is B; A and C will just be guesses at first if (oneTwoDistance >= zeroOneDistance && oneTwoDistance >= zeroTwoDistance) { pointB = patterns[0]; pointA = patterns[1]; pointC = patterns[2]; } else if (zeroTwoDistance >= oneTwoDistance && zeroTwoDistance >= zeroOneDistance) { pointB = patterns[1]; pointA = patterns[0]; pointC = patterns[2]; } else { pointB = patterns[2]; pointA = patterns[0]; pointC = patterns[1]; } // Use cross product to figure out whether A and C are correct or flipped. // This asks whether BC x BA has a positive z component, which is the arrangement // we want for A, B, C. If it's negative, then we've got it flipped around and // should swap A and C. if (crossProductZ(pointA, pointB, pointC) < 0.0f) { ResultPoint temp = pointA; pointA = pointC; pointC = temp; } patterns[0] = pointA; patterns[1] = pointB; patterns[2] = pointC; } }
public class class_name { public static void orderBestPatterns(ResultPoint[] patterns) { // Find distances between pattern centers float zeroOneDistance = distance(patterns[0], patterns[1]); float oneTwoDistance = distance(patterns[1], patterns[2]); float zeroTwoDistance = distance(patterns[0], patterns[2]); ResultPoint pointA; ResultPoint pointB; ResultPoint pointC; // Assume one closest to other two is B; A and C will just be guesses at first if (oneTwoDistance >= zeroOneDistance && oneTwoDistance >= zeroTwoDistance) { pointB = patterns[0]; // depends on control dependency: [if], data = [none] pointA = patterns[1]; // depends on control dependency: [if], data = [none] pointC = patterns[2]; // depends on control dependency: [if], data = [none] } else if (zeroTwoDistance >= oneTwoDistance && zeroTwoDistance >= zeroOneDistance) { pointB = patterns[1]; // depends on control dependency: [if], data = [none] pointA = patterns[0]; // depends on control dependency: [if], data = [none] pointC = patterns[2]; // depends on control dependency: [if], data = [none] } else { pointB = patterns[2]; // depends on control dependency: [if], data = [none] pointA = patterns[0]; // depends on control dependency: [if], data = [none] pointC = patterns[1]; // depends on control dependency: [if], data = [none] } // Use cross product to figure out whether A and C are correct or flipped. // This asks whether BC x BA has a positive z component, which is the arrangement // we want for A, B, C. If it's negative, then we've got it flipped around and // should swap A and C. if (crossProductZ(pointA, pointB, pointC) < 0.0f) { ResultPoint temp = pointA; pointA = pointC; // depends on control dependency: [if], data = [none] pointC = temp; // depends on control dependency: [if], data = [none] } patterns[0] = pointA; patterns[1] = pointB; patterns[2] = pointC; } }
public class class_name {
    /**
     * Assembles the result string from the parsed tokens: TEXT tokens are
     * appended literally; EXPRESSION tokens are resolved against the print
     * query - as a direct attribute of the main type when one exists,
     * otherwise as a select - and rendered through the attribute's UI
     * provider when an attribute was found, or appended raw otherwise.
     *
     * @param _callInstance instance the string is built for (not used here)
     * @param _print        print query supplying attribute and select values
     * @param _mode         target mode (not used here)
     * @return the assembled string
     * @throws EFapsException on errors while resolving values
     */
    public String makeString(final Instance _callInstance,
                             final AbstractPrintQuery _print,
                             final TargetMode _mode)
        throws EFapsException
    {
        final StringBuilder ret = new StringBuilder();
        for (final Token token : this.tokens) {
            switch (token.type) {
                case EXPRESSION: {
                    // Prefer a direct attribute of the main type, else a select.
                    final boolean isAttribute =
                        _print.getMainType().getAttributes().containsKey(token.value);
                    final Attribute attr = isAttribute
                        ? _print.getAttribute4Attribute(token.value)
                        : _print.getAttribute4Select(token.value);
                    final Object value = isAttribute
                        ? _print.getAttribute(token.value)
                        : _print.getSelect(token.value);
                    if (attr != null) {
                        ret.append(attr.getAttributeType().getUIProvider()
                            .getStringValue(UIValue.get(null, attr, value)));
                    } else if (value != null) {
                        ret.append(value);
                    }
                    break;
                }
                case TEXT:
                    ret.append(token.value);
                    break;
                default:
                    break;
            }
        }
        return ret.toString();
    }
}
public class class_name { public String makeString(final Instance _callInstance, final AbstractPrintQuery _print, final TargetMode _mode) throws EFapsException { final StringBuilder buf = new StringBuilder(); for (final Token token : this.tokens) { switch (token.type) { case EXPRESSION: Attribute attr = null; Object value = null; if (_print.getMainType().getAttributes().containsKey(token.value)) { attr = _print.getAttribute4Attribute(token.value); // depends on control dependency: [if], data = [none] value = _print.getAttribute(token.value); // depends on control dependency: [if], data = [none] } else { attr = _print.getAttribute4Select(token.value); // depends on control dependency: [if], data = [none] value = _print.getSelect(token.value); // depends on control dependency: [if], data = [none] } if (attr != null) { buf.append(attr.getAttributeType().getUIProvider().getStringValue( UIValue.get(null, attr, value))); // depends on control dependency: [if], data = [(attr] } else if (value != null) { buf.append(value); // depends on control dependency: [if], data = [(value] } break; case TEXT: buf.append(token.value); break; default: break; } } return buf.toString(); } }
public class class_name {
    /**
     * Compiles the supplied DDL into a fresh single-cluster catalog stored in
     * {@code m_catalog}. Security is disabled on the cluster; when a previous
     * catalog is supplied, its "database" entry is passed to the DDL
     * compilation step. The transaction epoch (seconds) is stamped onto the
     * cluster before returning.
     *
     * @param cannonicalDDLIfAny   canonical DDL source, may be null
     * @param previousCatalogIfAny previously built catalog to compile against, may be null
     * @param ddlReaderList        DDL sources to compile
     * @param jarOutput            in-memory jar receiving compilation output
     * @return the populated catalog, or null if DDL compilation failed
     */
    private Catalog compileCatalogInternal(
            final VoltCompilerReader cannonicalDDLIfAny,
            final Catalog previousCatalogIfAny,
            final List<VoltCompilerReader> ddlReaderList,
            final InMemoryJarfile jarOutput)
    {
        m_catalog = new Catalog();
        // Initialize the catalog for one cluster
        m_catalog.execute("add / clusters cluster");
        m_catalog.getClusters().get("cluster").setSecurityenabled(false);
        // shutdown and make a new hsqldb
        try {
            Database previousDBIfAny = null;
            if (previousCatalogIfAny != null) {
                previousDBIfAny =
                    previousCatalogIfAny.getClusters().get("cluster").getDatabases().get("database");
            }
            compileDatabaseNode(cannonicalDDLIfAny, previousDBIfAny, ddlReaderList, jarOutput);
        } catch (final VoltCompilerException e) {
            // Compilation failure is signalled to the caller with a null return.
            return null;
        }
        assert(m_catalog != null);
        // add epoch info to catalog
        final int epoch = (int)(TransactionIdManager.getEpoch() / 1000);
        m_catalog.getClusters().get("cluster").setLocalepoch(epoch);
        return m_catalog;
    }
}
public class class_name { private Catalog compileCatalogInternal( final VoltCompilerReader cannonicalDDLIfAny, final Catalog previousCatalogIfAny, final List<VoltCompilerReader> ddlReaderList, final InMemoryJarfile jarOutput) { m_catalog = new Catalog(); // Initialize the catalog for one cluster m_catalog.execute("add / clusters cluster"); m_catalog.getClusters().get("cluster").setSecurityenabled(false); // shutdown and make a new hsqldb try { Database previousDBIfAny = null; if (previousCatalogIfAny != null) { previousDBIfAny = previousCatalogIfAny.getClusters().get("cluster").getDatabases().get("database"); // depends on control dependency: [if], data = [none] } compileDatabaseNode(cannonicalDDLIfAny, previousDBIfAny, ddlReaderList, jarOutput); // depends on control dependency: [try], data = [none] } catch (final VoltCompilerException e) { return null; } // depends on control dependency: [catch], data = [none] assert(m_catalog != null); // add epoch info to catalog final int epoch = (int)(TransactionIdManager.getEpoch() / 1000); m_catalog.getClusters().get("cluster").setLocalepoch(epoch); return m_catalog; } }
public class class_name {
    /**
     * Releases the queue after a write completes. Under the queue lock: if no
     * other writes were queued meanwhile (BYPASSED) the state returns to
     * NOT_IN_USE so caller threads may proceed directly; if requests were
     * queued (STAND_BY) the queue thread waiting on {@code qSync} is notified
     * so it can start servicing them.
     */
    @Override
    public void notifyStandBy() {
        synchronized (qSync) {
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(tc, "notifyStandBy called with qStatus: " + qStatus);
            }
            if (qStatus == Q_STATUS.BYPASSED) {
                // no other writes waiting, so no queue thread in flight. Set the state for more caller thread processing
                qStatus = Q_STATUS.NOT_IN_USE;
            } else if (qStatus == Q_STATUS.STAND_BY) {
                // Requests have been put on the Queue while we were writing, The Queue thread waiting to service them can start now
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "notifyStandBy doing notify");
                }
                // Wakes one thread waiting on qSync (the queue servicing thread).
                qSync.notify();
            }
        }
    }
}
public class class_name { @Override public void notifyStandBy() { synchronized (qSync) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "notifyStandBy called with qStatus: " + qStatus); // depends on control dependency: [if], data = [none] } if (qStatus == Q_STATUS.BYPASSED) { // no other writes waiting, so no queue thread in flight. Set the state for more caller thread processing qStatus = Q_STATUS.NOT_IN_USE; // depends on control dependency: [if], data = [none] } else if (qStatus == Q_STATUS.STAND_BY) { // Requests have been put on the Queue while we were writing, The Queue thread waiting to service them can start now if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "notifyStandBy doing notify"); // depends on control dependency: [if], data = [none] } qSync.notify(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { @Override public JSONObject toJSON(StaticRouting routing) { JSONObject o = new JSONObject(); o.put("type", getJSONId()); JSONArray a = new JSONArray(); Map<StaticRouting.NodesMap, Map<Link, Boolean>> routes = routing.getStaticRoutes(); for (Map.Entry<StaticRouting.NodesMap, Map<Link, Boolean>> e : routes.entrySet()) { StaticRouting.NodesMap nm = e.getKey(); JSONObject ao = new JSONObject(); ao.put("nodes_map", nodesMapToJSON(nm)); JSONArray links = new JSONArray(); Map<Link, Boolean> v = e.getValue(); for (Link l : v.keySet()) { JSONObject lo = new JSONObject(); lo.put("link", l.id()); lo.put("direction", routes.get(nm).get(l).toString()); links.add(lo); } ao.put("links", links); a.add(ao); } o.put(ROUTES_LABEL, a); return o; } }
public class class_name { @Override public JSONObject toJSON(StaticRouting routing) { JSONObject o = new JSONObject(); o.put("type", getJSONId()); JSONArray a = new JSONArray(); Map<StaticRouting.NodesMap, Map<Link, Boolean>> routes = routing.getStaticRoutes(); for (Map.Entry<StaticRouting.NodesMap, Map<Link, Boolean>> e : routes.entrySet()) { StaticRouting.NodesMap nm = e.getKey(); JSONObject ao = new JSONObject(); ao.put("nodes_map", nodesMapToJSON(nm)); // depends on control dependency: [for], data = [e] JSONArray links = new JSONArray(); Map<Link, Boolean> v = e.getValue(); for (Link l : v.keySet()) { JSONObject lo = new JSONObject(); lo.put("link", l.id()); // depends on control dependency: [for], data = [l] lo.put("direction", routes.get(nm).get(l).toString()); // depends on control dependency: [for], data = [l] links.add(lo); // depends on control dependency: [for], data = [l] } ao.put("links", links); // depends on control dependency: [for], data = [none] a.add(ao); // depends on control dependency: [for], data = [none] } o.put(ROUTES_LABEL, a); return o; } }
public class class_name {
    /**
     * Loads this set from its serialized char-array form, replacing any
     * current contents.
     * <p>
     * Layout starting at {@code srcStart}: the first char is the total length;
     * if its high bit (0x8000) is set the data contains supplementary values
     * and the next char holds the BMP-only length, otherwise every value is
     * BMP and bmpLength equals length. The following {@code length} chars are
     * the set data itself.
     *
     * @param src serialized data
     * @param srcStart offset of the header within {@code src}
     * @return always true on success
     * @throws IndexOutOfBoundsException if {@code src} is too short to hold
     *         the declared length; note the set has already been cleared
     *         (length reset to 0, array null) when this is thrown
     */
    public final boolean getSet(char src[], int srcStart) {
        // leave most argument checking up to Java exceptions
        array=null;
        arrayOffset=bmpLength=length=0;
        length=src[srcStart++];
        if ((length&0x8000) != 0) {
            /* there are supplementary values */
            length&=0x7fff;  // strip the "has supplementaries" flag bit
            if(src.length<(srcStart+1+length)) {
                length=0;
                throw new IndexOutOfBoundsException();
            }
            bmpLength=src[srcStart++];
        } else {
            /* only BMP values */
            if(src.length<(srcStart+length)) {
                length=0;
                throw new IndexOutOfBoundsException();
            }
            bmpLength=length;
        }
        array = new char[length];
        System.arraycopy(src,srcStart,array,0,length);
        //arrayOffset=srcStart;
        return true;
    }
}
public class class_name { public final boolean getSet(char src[], int srcStart) { // leave most argument checking up to Java exceptions array=null; arrayOffset=bmpLength=length=0; length=src[srcStart++]; if ((length&0x8000) != 0) { /* there are supplementary values */ length&=0x7fff; // depends on control dependency: [if], data = [none] if(src.length<(srcStart+1+length)) { length=0; // depends on control dependency: [if], data = [none] throw new IndexOutOfBoundsException(); } bmpLength=src[srcStart++]; // depends on control dependency: [if], data = [none] } else { /* only BMP values */ if(src.length<(srcStart+length)) { length=0; // depends on control dependency: [if], data = [none] throw new IndexOutOfBoundsException(); } bmpLength=length; // depends on control dependency: [if], data = [none] } array = new char[length]; System.arraycopy(src,srcStart,array,0,length); //arrayOffset=srcStart; return true; } }
public class class_name {
    /**
     * Recomputes whether this layer is currently showing: a visible layer
     * shows only when the map's current scale lies within the layer's
     * configured min/max scale range; an invisible layer never shows. Fires a
     * LayerShownEvent when the showing state changed and events are requested.
     *
     * @param fireEvents whether to fire an event on a state change
     */
    protected void updateShowing(boolean fireEvents) {
        double scale = mapModel.getMapView().getCurrentScale();
        if (!visible) {
            showing = false;
            return;
        }
        boolean wasShowing = showing;
        showing = scale >= layerInfo.getMinimumScale().getPixelPerUnit()
                && scale <= layerInfo.getMaximumScale().getPixelPerUnit();
        if (wasShowing != showing && fireEvents) {
            handlerManager.fireEvent(new LayerShownEvent(this, true));
        }
    }
}
public class class_name { protected void updateShowing(boolean fireEvents) { double scale = mapModel.getMapView().getCurrentScale(); if (visible) { boolean oldShowing = showing; showing = scale >= layerInfo.getMinimumScale().getPixelPerUnit() && scale <= layerInfo.getMaximumScale().getPixelPerUnit(); // depends on control dependency: [if], data = [none] if (oldShowing != showing && fireEvents) { handlerManager.fireEvent(new LayerShownEvent(this, true)); // depends on control dependency: [if], data = [none] } } else { showing = false; // depends on control dependency: [if], data = [none] } } }
public class class_name { private static Object getFutureValue(ListenableFuture<?> future) { Object valueToSet; if (future instanceof TrustedFuture) { // Break encapsulation for TrustedFuture instances since we know that subclasses cannot // override .get() (since it is final) and therefore this is equivalent to calling .get() // and unpacking the exceptions like we do below (just much faster because it is a single // field read instead of a read, several branches and possibly creating exceptions). return ((AbstractFuture<?>) future).value; } else { // Otherwise calculate valueToSet by calling .get() try { Object v = getDone(future); valueToSet = v == null ? NULL : v; } catch (ExecutionException exception) { valueToSet = new Failure(exception.getCause()); } catch (CancellationException cancellation) { valueToSet = new Cancellation(false, cancellation); } catch (Throwable t) { valueToSet = new Failure(t); } } return valueToSet; } }
public class class_name { private static Object getFutureValue(ListenableFuture<?> future) { Object valueToSet; if (future instanceof TrustedFuture) { // Break encapsulation for TrustedFuture instances since we know that subclasses cannot // override .get() (since it is final) and therefore this is equivalent to calling .get() // and unpacking the exceptions like we do below (just much faster because it is a single // field read instead of a read, several branches and possibly creating exceptions). return ((AbstractFuture<?>) future).value; // depends on control dependency: [if], data = [none] } else { // Otherwise calculate valueToSet by calling .get() try { Object v = getDone(future); valueToSet = v == null ? NULL : v; // depends on control dependency: [try], data = [none] } catch (ExecutionException exception) { valueToSet = new Failure(exception.getCause()); } catch (CancellationException cancellation) { // depends on control dependency: [catch], data = [none] valueToSet = new Cancellation(false, cancellation); } catch (Throwable t) { // depends on control dependency: [catch], data = [none] valueToSet = new Failure(t); } // depends on control dependency: [catch], data = [none] } return valueToSet; } }
public class class_name {
    /**
     * Returns info for a map task attempt, adjusted for the locality the
     * simulator scheduled it with:
     * <ul>
     *   <li>task or attempt absent from the trace: fabricate attempt info;</li>
     *   <li>KILLED attempt: fabricate (trace statistics unusable);</li>
     *   <li>FAILED attempt: return trace data as-is (not locality-bound);</li>
     *   <li>SUCCESS attempt: return trace data when the locality matches,
     *       otherwise scale it by the rack-local / rack-remote factors.</li>
     * </ul>
     *
     * @param taskNumber        index of the map task
     * @param taskAttemptNumber index of the attempt within the task
     * @param locality          locality the simulated attempt runs with
     * @return the (possibly synthesized or scaled) attempt info
     * @throws IllegalArgumentException if the logged attempt result is not
     *         SUCCESS, FAILED or KILLED
     */
    @Override
    public TaskAttemptInfo getMapTaskAttemptInfoAdjusted(int taskNumber,
        int taskAttemptNumber, int locality) {
      TaskType taskType = TaskType.MAP;
      LoggedTask loggedTask = getLoggedTask(taskType, taskNumber);
      if (loggedTask == null) {
        // TODO insert parameters
        TaskInfo taskInfo = new TaskInfo(0, 0, 0, 0, 0);
        return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber,
            taskNumber, locality);
      }
      LoggedTaskAttempt loggedAttempt = getLoggedTaskAttempt(taskType,
          taskNumber, taskAttemptNumber);
      if (loggedAttempt == null) {
        // Task exists, but attempt is missing.
        TaskInfo taskInfo = getTaskInfo(loggedTask);
        return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber,
            taskNumber, locality);
      } else {
        // Task and TaskAttempt both exist.
        if (loggedAttempt.getResult() == Values.KILLED) {
          TaskInfo taskInfo = getTaskInfo(loggedTask);
          return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber,
              taskNumber, locality);
        } else if (loggedAttempt.getResult() == Values.FAILED) {
          /**
           * FAILED attempt is not affected by locality however, made-up FAILED
           * attempts ARE affected by locality, since statistics are present for
           * attempts of different locality.
           */
          return getTaskAttemptInfo(loggedTask, loggedAttempt);
        } else if (loggedAttempt.getResult() == Values.SUCCESS) {
          int loggedLocality = getLocality(loggedTask, loggedAttempt);
          if (locality == loggedLocality) {
            return getTaskAttemptInfo(loggedTask, loggedAttempt);
          } else {
            // attempt succeeded in trace. It is scheduled in simulation with
            // a different locality.
            return scaleInfo(loggedTask, loggedAttempt, locality,
                loggedLocality, rackLocalOverNodeLocal, rackRemoteOverNodeLocal);
          }
        } else {
          throw new IllegalArgumentException(
              "attempt result is not SUCCEEDED, FAILED or KILLED: "
                  + loggedAttempt.getResult());
        }
      }
    }
}
public class class_name { @Override public TaskAttemptInfo getMapTaskAttemptInfoAdjusted(int taskNumber, int taskAttemptNumber, int locality) { TaskType taskType = TaskType.MAP; LoggedTask loggedTask = getLoggedTask(taskType, taskNumber); if (loggedTask == null) { // TODO insert parameters TaskInfo taskInfo = new TaskInfo(0, 0, 0, 0, 0); return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber, taskNumber, locality); // depends on control dependency: [if], data = [none] } LoggedTaskAttempt loggedAttempt = getLoggedTaskAttempt(taskType, taskNumber, taskAttemptNumber); if (loggedAttempt == null) { // Task exists, but attempt is missing. TaskInfo taskInfo = getTaskInfo(loggedTask); return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber, taskNumber, locality); } else { // Task and TaskAttempt both exist. if (loggedAttempt.getResult() == Values.KILLED) { TaskInfo taskInfo = getTaskInfo(loggedTask); return makeUpTaskAttemptInfo(taskType, taskInfo, taskAttemptNumber, taskNumber, locality); } else if (loggedAttempt.getResult() == Values.FAILED) { /** * FAILED attempt is not affected by locality however, made-up FAILED * attempts ARE affected by locality, since statistics are present for * attempts of different locality. */ return getTaskAttemptInfo(loggedTask, loggedAttempt); } else if (loggedAttempt.getResult() == Values.SUCCESS) { int loggedLocality = getLocality(loggedTask, loggedAttempt); if (locality == loggedLocality) { return getTaskAttemptInfo(loggedTask, loggedAttempt); // depends on control dependency: [if], data = [none] } else { // attempt succeeded in trace. It is scheduled in simulation with // a different locality. return scaleInfo(loggedTask, loggedAttempt, locality, loggedLocality, rackLocalOverNodeLocal, rackRemoteOverNodeLocal); // depends on control dependency: [if], data = [none] } } else { throw new IllegalArgumentException( "attempt result is not SUCCEEDED, FAILED or KILLED: " + loggedAttempt.getResult()); } } } }
public class class_name {
    /**
     * Sets the buffer position, auto-expanding the underlying buffer first so
     * the new position is always in range, and invalidating the mark when it
     * would fall beyond the new position.
     *
     * @param newPosition the new position value
     * @return this buffer, for call chaining
     */
    @Override
    public AbstractIoBufferEx position(int newPosition) {
        autoExpand(newPosition, 0);
        buf().position(newPosition);
        if (mark > newPosition) {
            mark = -1;  // the saved mark is no longer valid
        }
        return this;
    }
}
public class class_name { @Override public AbstractIoBufferEx position(int newPosition) { autoExpand(newPosition, 0); buf().position(newPosition); if (mark > newPosition) { mark = -1; // depends on control dependency: [if], data = [none] } return this; } }
public class class_name { protected static void internalAlarmHandler(Object arg) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "internalAlarmHandler", arg); ResendRecord record = (ResendRecord) arg; synchronized (_requestMap) { Long key = new Long(record.requestID); if (_requestMap.containsKey(key)) { // Someone still waiting for the request, figure out what to do about it if (record.triesRemaining != 0) { // We have tries remaining so resend // Short circuit if ME unreachable if (!record.MP.getMPIO().isMEReachable(record.targetUuid)) wakeupWaiter(record.requestID, null); record.MP.getMPIO().sendToMe(record.targetUuid, SIMPConstants.CONTROL_MESSAGE_PRIORITY, record.msg); // 219870: use triesRemaining < 0 to try forever if (record.triesRemaining > 0) record.triesRemaining--; record.MP.getAlarmManager().create(record.resendInterval, _alarmHandler, record); } else { // Wakeup the waiter with a timeout error wakeupWaiter(record.requestID, null); } } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "internalAlarmHandler"); } }
public class class_name { protected static void internalAlarmHandler(Object arg) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "internalAlarmHandler", arg); ResendRecord record = (ResendRecord) arg; synchronized (_requestMap) { Long key = new Long(record.requestID); if (_requestMap.containsKey(key)) { // Someone still waiting for the request, figure out what to do about it if (record.triesRemaining != 0) { // We have tries remaining so resend // Short circuit if ME unreachable if (!record.MP.getMPIO().isMEReachable(record.targetUuid)) wakeupWaiter(record.requestID, null); record.MP.getMPIO().sendToMe(record.targetUuid, SIMPConstants.CONTROL_MESSAGE_PRIORITY, record.msg); // depends on control dependency: [if], data = [none] // 219870: use triesRemaining < 0 to try forever if (record.triesRemaining > 0) record.triesRemaining--; record.MP.getAlarmManager().create(record.resendInterval, _alarmHandler, record); // depends on control dependency: [if], data = [none] } else { // Wakeup the waiter with a timeout error wakeupWaiter(record.requestID, null); // depends on control dependency: [if], data = [none] } } } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "internalAlarmHandler"); } }
public class class_name {
    /**
     * Installs the security manager and instruments every listed class through
     * a Javassist ClassPool so its checks are enforced.
     *
     * @param classLoader     loader used to locate the classes being secured
     * @param securityManager manager enforcing the security checks
     * @param securedClasses  fully-qualified names of the classes to secure
     * @throws IllegalArgumentException if any argument is null
     */
    private static void init(ClassLoader classLoader, SecurityManager securityManager, Set<String> securedClasses) {
        if (classLoader == null) {
            throw new IllegalArgumentException("A class loader must be provided");
        }
        if (securityManager == null) {
            throw new IllegalArgumentException("A security manager must be provided");
        }
        if (securedClasses == null) {
            throw new IllegalArgumentException("Secured classes cannot be null");
        }
        Security.securityManager = securityManager;
        final ClassPool pool = ClassPool.getDefault();
        for (String className : securedClasses) {
            secure(classLoader, pool, className);
        }
    }
}
public class class_name { private static void init(ClassLoader classLoader, SecurityManager securityManager, Set<String> securedClasses) { if (classLoader == null) throw new IllegalArgumentException("A class loader must be provided"); if (securityManager == null) throw new IllegalArgumentException("A security manager must be provided"); if (securedClasses == null) throw new IllegalArgumentException("Secured classes cannot be null"); Security.securityManager = securityManager; ClassPool cp = ClassPool.getDefault(); for (String securedClass : securedClasses) { secure(classLoader, cp, securedClass); // depends on control dependency: [for], data = [securedClass] } } }
public class class_name {
    /**
     * Combines this node filter with another using logical AND.
     * <p>
     * When both filters are still simple (single-filter) evaluations, a brand
     * new filter is returned whose pool filter and predicate are the
     * conjunction of both sides. Otherwise this filter is mutated in place:
     * its evaluation is wrapped in an AndEvaluation keyed by node IP address,
     * and this same instance is returned.
     *
     * @param otherFilter the filter to AND with this one
     * @return a new combined filter, or this (mutated) instance
     */
    @Override
    public LoadBalancerNodeFilter and(LoadBalancerNodeFilter otherFilter) {
        if (evaluation instanceof SingleFilterEvaluation &&
            otherFilter.evaluation instanceof SingleFilterEvaluation) {
            return
                new LoadBalancerNodeFilter(
                    getLoadBalancerPoolFilter().and(otherFilter.getLoadBalancerPoolFilter()),
                    getPredicate().and(otherFilter.getPredicate())
                );
        } else {
            // NOTE: side effect - this filter's own evaluation is replaced.
            evaluation = new AndEvaluation<>(evaluation, otherFilter, LoadBalancerNodeMetadata::getIpAddress);
            return this;
        }
    }
}
public class class_name { @Override public LoadBalancerNodeFilter and(LoadBalancerNodeFilter otherFilter) { if (evaluation instanceof SingleFilterEvaluation && otherFilter.evaluation instanceof SingleFilterEvaluation) { return new LoadBalancerNodeFilter( getLoadBalancerPoolFilter().and(otherFilter.getLoadBalancerPoolFilter()), getPredicate().and(otherFilter.getPredicate()) ); // depends on control dependency: [if], data = [none] } else { evaluation = new AndEvaluation<>(evaluation, otherFilter, LoadBalancerNodeMetadata::getIpAddress); // depends on control dependency: [if], data = [none] return this; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the shared RuntimeInstance delegate, creating and caching it on
     * first use. The cached-holder object serves both as the fast-path cache
     * and as the lock for the double-checked initialization.
     */
    public static RuntimeInstance getInstance() {
        RuntimeInstance instance = RuntimeInstance.CACHED_DELEGATE.get();
        if (instance == null) {
            synchronized (RuntimeInstance.CACHED_DELEGATE) {
                // Re-check under the lock: another thread may have won the race.
                instance = RuntimeInstance.CACHED_DELEGATE.get();
                if (instance == null) {
                    RuntimeInstance.CACHED_DELEGATE.set(findDelegate());
                    instance = RuntimeInstance.CACHED_DELEGATE.get();
                }
            }
        }
        return instance;
    }
}
public class class_name { public static RuntimeInstance getInstance() { RuntimeInstance result = RuntimeInstance.CACHED_DELEGATE.get(); if (result != null) { return result; // depends on control dependency: [if], data = [none] } synchronized(RuntimeInstance.CACHED_DELEGATE) { result=RuntimeInstance.CACHED_DELEGATE.get(); if(result==null) { RuntimeInstance.CACHED_DELEGATE.set(findDelegate()); // depends on control dependency: [if], data = [none] result=RuntimeInstance.CACHED_DELEGATE.get(); // depends on control dependency: [if], data = [none] } return result; } } }
public class class_name {
    /**
     * Builds the 404 response: the generic not-found page in production, or
     * the developer page (which lists the known routes) otherwise. The result
     * is wrapped in an already-completed future.
     *
     * @param request the request that matched no route
     * @param message detail message (unused by the default pages)
     * @return a completed stage holding the not-found result
     */
    protected CompletionStage<Result> onNotFound(RequestHeader request, String message) {
        final Result result;
        if (environment.isProd()) {
            result = Results.notFound(
                views.html.defaultpages.notFound.render(
                    request.method(), request.uri(), request.asScala()));
        } else {
            result = Results.notFound(
                views.html.defaultpages.devNotFound.render(
                    request.method(), request.uri(),
                    Some.apply(routes.get()), request.asScala()));
        }
        return CompletableFuture.completedFuture(result);
    }
}
public class class_name { protected CompletionStage<Result> onNotFound(RequestHeader request, String message) { if (environment.isProd()) { return CompletableFuture.completedFuture( Results.notFound( views.html.defaultpages.notFound.render( request.method(), request.uri(), request.asScala()))); // depends on control dependency: [if], data = [none] } else { return CompletableFuture.completedFuture( Results.notFound( views.html.defaultpages.devNotFound.render( request.method(), request.uri(), Some.apply(routes.get()), request.asScala()))); // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Returns the destination location manager, emitting paired entry/exit
     * trace records when entry tracing is enabled.
     */
    public DestinationLocationManager getDestinationLocationManager() {
        final boolean traceOn = TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled();
        if (traceOn) {
            SibTr.entry(tc, "getDestinationLocationManager");
            SibTr.exit(tc, "getDestinationLocationManager", _destinationLocationManager);
        }
        return _destinationLocationManager;
    }
}
public class class_name { public DestinationLocationManager getDestinationLocationManager() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "getDestinationLocationManager"); // depends on control dependency: [if], data = [none] SibTr.exit(tc, "getDestinationLocationManager", _destinationLocationManager); // depends on control dependency: [if], data = [none] } return _destinationLocationManager; } }
public class class_name { public void buildConstantSummaries(XMLNode node, Content contentTree) { PackageDoc[] packages = configuration.packages; printedPackageHeaders = new HashSet<String>(); Content summariesTree = writer.getConstantSummaries(); for (int i = 0; i < packages.length; i++) { if (hasConstantField(packages[i])) { currentPackage = packages[i]; //Build the documentation for the current package. buildChildren(node, summariesTree); } } contentTree.addContent(summariesTree); } }
public class class_name { public void buildConstantSummaries(XMLNode node, Content contentTree) { PackageDoc[] packages = configuration.packages; printedPackageHeaders = new HashSet<String>(); Content summariesTree = writer.getConstantSummaries(); for (int i = 0; i < packages.length; i++) { if (hasConstantField(packages[i])) { currentPackage = packages[i]; // depends on control dependency: [if], data = [none] //Build the documentation for the current package. buildChildren(node, summariesTree); // depends on control dependency: [if], data = [none] } } contentTree.addContent(summariesTree); } }
public class class_name { private boolean entriesImplyPermission(List<ProGradePolicyEntry> policyEntriesList, ProtectionDomain domain, Permission permission) { for (ProGradePolicyEntry entry : policyEntriesList) { if (entry.implies(domain, permission)) { return true; } } return false; } }
public class class_name { private boolean entriesImplyPermission(List<ProGradePolicyEntry> policyEntriesList, ProtectionDomain domain, Permission permission) { for (ProGradePolicyEntry entry : policyEntriesList) { if (entry.implies(domain, permission)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public double transformNumber(double number) { if (ruleSet == null) { return number / divisor; } else { return Math.floor(number / divisor); } } }
public class class_name { public double transformNumber(double number) { if (ruleSet == null) { return number / divisor; // depends on control dependency: [if], data = [none] } else { return Math.floor(number / divisor); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void set(String key, String value) { if("dn".equals(key)) { this.dn = value; } else if (value != null && !value.isEmpty() && key != null && !key.isEmpty()) { addAttribute(new BasicAttribute(key, value, true)); } } }
public class class_name { @Override public void set(String key, String value) { if("dn".equals(key)) { this.dn = value; // depends on control dependency: [if], data = [none] } else if (value != null && !value.isEmpty() && key != null && !key.isEmpty()) { addAttribute(new BasicAttribute(key, value, true)); // depends on control dependency: [if], data = [none] } } }
public class class_name { public EnumDeclarationDescr enumDeclaration( DeclareDescrBuilder ddb ) throws RecognitionException { EnumDeclarationDescrBuilder declare = null; try { declare = helper.start( ddb, EnumDeclarationDescrBuilder.class, null ); // type may be qualified when adding metadata String type = qualifiedIdentifier(); if ( state.failed ) return null; if ( state.backtracking == 0 ) declare.name( type ); while ( input.LA( 1 ) == DRL5Lexer.AT ) { // annotation* annotation( declare ); if ( state.failed ) return null; } while ( input.LA( 1 ) == DRL5Lexer.ID ) { int next = input.LA( 2 ); if ( next == DRL5Lexer.LEFT_PAREN || next == DRL5Lexer.COMMA || next == DRL5Lexer.SEMICOLON ) { enumerative( declare ); if ( state.failed ) return null; } if ( input.LA( 1 ) == DRL5Lexer.COMMA ) { match( input, DRL5Lexer.COMMA, null, null, DroolsEditorType.SYMBOL ); } else { match( input, DRL5Lexer.SEMICOLON, null, null, DroolsEditorType.SYMBOL ); break; } } //boolean qualified = type.indexOf( '.' ) >= 0; while ( //! qualified && input.LA( 1 ) == DRL5Lexer.ID && ! helper.validateIdentifierKey( DroolsSoftKeywords.END ) ) { // field* field( declare ); if ( state.failed ) return null; } match( input, DRL5Lexer.ID, DroolsSoftKeywords.END, null, DroolsEditorType.KEYWORD ); if ( state.failed ) return null; } catch ( RecognitionException re ) { reportError( re ); } finally { helper.end( TypeDeclarationDescrBuilder.class, declare ); } return (declare != null) ? declare.getDescr() : null; } }
public class class_name { public EnumDeclarationDescr enumDeclaration( DeclareDescrBuilder ddb ) throws RecognitionException { EnumDeclarationDescrBuilder declare = null; try { declare = helper.start( ddb, EnumDeclarationDescrBuilder.class, null ); // type may be qualified when adding metadata String type = qualifiedIdentifier(); if ( state.failed ) return null; if ( state.backtracking == 0 ) declare.name( type ); while ( input.LA( 1 ) == DRL5Lexer.AT ) { // annotation* annotation( declare ); // depends on control dependency: [while], data = [none] if ( state.failed ) return null; } while ( input.LA( 1 ) == DRL5Lexer.ID ) { int next = input.LA( 2 ); if ( next == DRL5Lexer.LEFT_PAREN || next == DRL5Lexer.COMMA || next == DRL5Lexer.SEMICOLON ) { enumerative( declare ); // depends on control dependency: [if], data = [none] if ( state.failed ) return null; } if ( input.LA( 1 ) == DRL5Lexer.COMMA ) { match( input, DRL5Lexer.COMMA, null, null, DroolsEditorType.SYMBOL ); // depends on control dependency: [if], data = [none] } else { match( input, DRL5Lexer.SEMICOLON, null, null, DroolsEditorType.SYMBOL ); // depends on control dependency: [if], data = [none] break; } } //boolean qualified = type.indexOf( '.' ) >= 0; while ( //! qualified && input.LA( 1 ) == DRL5Lexer.ID && ! helper.validateIdentifierKey( DroolsSoftKeywords.END ) ) { // field* field( declare ); // depends on control dependency: [while], data = [none] if ( state.failed ) return null; } match( input, DRL5Lexer.ID, DroolsSoftKeywords.END, null, DroolsEditorType.KEYWORD ); if ( state.failed ) return null; } catch ( RecognitionException re ) { reportError( re ); } finally { helper.end( TypeDeclarationDescrBuilder.class, declare ); } return (declare != null) ? declare.getDescr() : null; } }
public class class_name { public void stop() { Preconditions.checkState(running, "Cannot stop if it's not running"); synchronized (this) { log.atInfo().log("stopping client background thread and flushing the report aggregator"); for (ReportRequest req : reportAggregator.clear()) { try { transport.services().report(serviceName, req).execute(); } catch (IOException e) { log.atSevere().withCause(e).log("direct send of a report request failed"); } } this.stopped = true; // the scheduler thread will set running to false if (isRunningSchedulerDirectly()) { resetIfStopped(); } this.scheduler = null; } } }
public class class_name { public void stop() { Preconditions.checkState(running, "Cannot stop if it's not running"); synchronized (this) { log.atInfo().log("stopping client background thread and flushing the report aggregator"); for (ReportRequest req : reportAggregator.clear()) { try { transport.services().report(serviceName, req).execute(); // depends on control dependency: [try], data = [none] } catch (IOException e) { log.atSevere().withCause(e).log("direct send of a report request failed"); } // depends on control dependency: [catch], data = [none] } this.stopped = true; // the scheduler thread will set running to false if (isRunningSchedulerDirectly()) { resetIfStopped(); // depends on control dependency: [if], data = [none] } this.scheduler = null; } } }
public class class_name { void addSourceTargetSet(UnicodeSet filter, UnicodeSet sourceSet, UnicodeSet targetSet) { UnicodeSet currentFilter = new UnicodeSet(filter); UnicodeSet revisiting = new UnicodeSet(); int count = ruleVector.size(); for (int i=0; i<count; ++i) { TransliterationRule r = ruleVector.get(i); r.addSourceTargetSet(currentFilter, sourceSet, targetSet, revisiting.clear()); currentFilter.addAll(revisiting); } } }
public class class_name { void addSourceTargetSet(UnicodeSet filter, UnicodeSet sourceSet, UnicodeSet targetSet) { UnicodeSet currentFilter = new UnicodeSet(filter); UnicodeSet revisiting = new UnicodeSet(); int count = ruleVector.size(); for (int i=0; i<count; ++i) { TransliterationRule r = ruleVector.get(i); r.addSourceTargetSet(currentFilter, sourceSet, targetSet, revisiting.clear()); // depends on control dependency: [for], data = [none] currentFilter.addAll(revisiting); // depends on control dependency: [for], data = [none] } } }
public class class_name { @Override public void markUnread(String channel, Collection<EventId> events) { // For each slab keep track of the earliest index for each unread event. ConcurrentMap<ChannelSlab, Integer> channelSlabs = Maps.newConcurrentMap(); for (EventId event : events) { AstyanaxEventId astyanaxEvent = (AstyanaxEventId) event; checkArgument(channel.equals(astyanaxEvent.getChannel())); channelSlabs.merge(new ChannelSlab(channel, astyanaxEvent.getSlabId()), astyanaxEvent.getEventIdx(), Ints::min); } for (Map.Entry<ChannelSlab, Integer> entry : channelSlabs.entrySet()) { ChannelSlab channelSlab = entry.getKey(); int eventIdx = entry.getValue(); // Get the closed slab cursor, if any SlabCursor cursor = _closedSlabCursors.getIfPresent(channelSlab); // If the cursor exists and is beyond the lowest unread index, rewind it if (cursor != null && (cursor.get() == SlabCursor.END || cursor.get() > eventIdx)) { // Synchronize on the cursor before updating it to avoid concurrent updates with a read //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (cursor) { if (cursor.get() == SlabCursor.END || cursor.get() > eventIdx) { cursor.set(eventIdx); } } } } } }
public class class_name { @Override public void markUnread(String channel, Collection<EventId> events) { // For each slab keep track of the earliest index for each unread event. ConcurrentMap<ChannelSlab, Integer> channelSlabs = Maps.newConcurrentMap(); for (EventId event : events) { AstyanaxEventId astyanaxEvent = (AstyanaxEventId) event; checkArgument(channel.equals(astyanaxEvent.getChannel())); // depends on control dependency: [for], data = [none] channelSlabs.merge(new ChannelSlab(channel, astyanaxEvent.getSlabId()), astyanaxEvent.getEventIdx(), Ints::min); // depends on control dependency: [for], data = [none] } for (Map.Entry<ChannelSlab, Integer> entry : channelSlabs.entrySet()) { ChannelSlab channelSlab = entry.getKey(); int eventIdx = entry.getValue(); // Get the closed slab cursor, if any SlabCursor cursor = _closedSlabCursors.getIfPresent(channelSlab); // If the cursor exists and is beyond the lowest unread index, rewind it if (cursor != null && (cursor.get() == SlabCursor.END || cursor.get() > eventIdx)) { // Synchronize on the cursor before updating it to avoid concurrent updates with a read //noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (cursor) { // depends on control dependency: [if], data = [none] if (cursor.get() == SlabCursor.END || cursor.get() > eventIdx) { cursor.set(eventIdx); // depends on control dependency: [if], data = [none] } } } } } }
public class class_name { public static boolean stopLiveValidation(final Object target) { if (sLiveValidations == null || ! sLiveValidations.containsKey(target)) { return false; } final ViewGlobalFocusChangeListener removed = sLiveValidations.remove(target); final ViewTreeObserver treeObserver = removed.formContainer.getViewTreeObserver(); if (treeObserver.isAlive()) { treeObserver.removeOnGlobalFocusChangeListener(removed); return true; } return false; } }
public class class_name { public static boolean stopLiveValidation(final Object target) { if (sLiveValidations == null || ! sLiveValidations.containsKey(target)) { return false; // depends on control dependency: [if], data = [none] } final ViewGlobalFocusChangeListener removed = sLiveValidations.remove(target); final ViewTreeObserver treeObserver = removed.formContainer.getViewTreeObserver(); if (treeObserver.isAlive()) { treeObserver.removeOnGlobalFocusChangeListener(removed); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { @Override public void notifyItemChanged(int rowIndex, int columnIndex) { for (AdaptiveTableDataSetObserver observer : mAdaptiveTableDataSetObservers) { observer.notifyItemChanged(rowIndex, columnIndex); } } }
public class class_name { @Override public void notifyItemChanged(int rowIndex, int columnIndex) { for (AdaptiveTableDataSetObserver observer : mAdaptiveTableDataSetObservers) { observer.notifyItemChanged(rowIndex, columnIndex); // depends on control dependency: [for], data = [observer] } } }
public class class_name { private void init( List<T> list ) { this.list = list; indexes = new int[ list.size() ]; counters = new int[ list.size() ]; for( int i = 0; i < indexes.length ; i++ ) { counters[i] = indexes[i] = i; } total = 1; for( int i = 2; i <= indexes.length ; i++ ) { total *= i; } permutation = 0; } }
public class class_name { private void init( List<T> list ) { this.list = list; indexes = new int[ list.size() ]; counters = new int[ list.size() ]; for( int i = 0; i < indexes.length ; i++ ) { counters[i] = indexes[i] = i; // depends on control dependency: [for], data = [i] } total = 1; for( int i = 2; i <= indexes.length ; i++ ) { total *= i; // depends on control dependency: [for], data = [i] } permutation = 0; } }
public class class_name { public static int e(String msg) { // This is a quick check to avoid the expensive stack trace reflection. if (!activated) { return 0; } String caller = LogHelper.getCaller(); if (caller != null) { return e(caller, msg); } return 0; } }
public class class_name { public static int e(String msg) { // This is a quick check to avoid the expensive stack trace reflection. if (!activated) { return 0; // depends on control dependency: [if], data = [none] } String caller = LogHelper.getCaller(); if (caller != null) { return e(caller, msg); // depends on control dependency: [if], data = [(caller] } return 0; } }
public class class_name { @Trivial private File getArchiveFile() { String methodName = "getArchiveFile"; if ( archiveFileLock != null ) { synchronized ( archiveFileLock ) { if ( (archiveFile == null) && !archiveFileFailed ) { try { archiveFile = extractEntry( entryInEnclosingContainer, getCacheDir() ); // 'extractEntry' throws IOException if ( archiveFile != null ) { archiveFilePath = archiveFile.getAbsolutePath(); if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) { Tr.debug(tc, methodName + " Archive file [ " + archiveFilePath + " ]"); } } else { archiveFileFailed = true; Tr.error(tc, "extract.cache.null", entryInEnclosingContainer.getPath()); } } catch ( IOException e ) { archiveFileFailed = true; Tr.error(tc, "extract.cache.fail", e.getMessage()); } } } } return archiveFile; } }
public class class_name { @Trivial private File getArchiveFile() { String methodName = "getArchiveFile"; if ( archiveFileLock != null ) { synchronized ( archiveFileLock ) { // depends on control dependency: [if], data = [( archiveFileLock] if ( (archiveFile == null) && !archiveFileFailed ) { try { archiveFile = extractEntry( entryInEnclosingContainer, getCacheDir() ); // depends on control dependency: [try], data = [none] // 'extractEntry' throws IOException if ( archiveFile != null ) { archiveFilePath = archiveFile.getAbsolutePath(); // depends on control dependency: [if], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) { Tr.debug(tc, methodName + " Archive file [ " + archiveFilePath + " ]"); // depends on control dependency: [if], data = [none] } } else { archiveFileFailed = true; // depends on control dependency: [if], data = [none] Tr.error(tc, "extract.cache.null", entryInEnclosingContainer.getPath()); // depends on control dependency: [if], data = [none] } } catch ( IOException e ) { archiveFileFailed = true; Tr.error(tc, "extract.cache.fail", e.getMessage()); } // depends on control dependency: [catch], data = [none] } } } return archiveFile; } }
public class class_name { public void marshall(HttpInstanceSummary httpInstanceSummary, ProtocolMarshaller protocolMarshaller) { if (httpInstanceSummary == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(httpInstanceSummary.getInstanceId(), INSTANCEID_BINDING); protocolMarshaller.marshall(httpInstanceSummary.getNamespaceName(), NAMESPACENAME_BINDING); protocolMarshaller.marshall(httpInstanceSummary.getServiceName(), SERVICENAME_BINDING); protocolMarshaller.marshall(httpInstanceSummary.getHealthStatus(), HEALTHSTATUS_BINDING); protocolMarshaller.marshall(httpInstanceSummary.getAttributes(), ATTRIBUTES_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(HttpInstanceSummary httpInstanceSummary, ProtocolMarshaller protocolMarshaller) { if (httpInstanceSummary == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(httpInstanceSummary.getInstanceId(), INSTANCEID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(httpInstanceSummary.getNamespaceName(), NAMESPACENAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(httpInstanceSummary.getServiceName(), SERVICENAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(httpInstanceSummary.getHealthStatus(), HEALTHSTATUS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(httpInstanceSummary.getAttributes(), ATTRIBUTES_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { protected void visitObjectRecursive(Object object, Filter<Object> visitor, Set<HashKey<Object>> visitedSet) { if (object == null) { return; } if (visitedSet != null) { HashKey<Object> hashKey = new HashKey<>(object); boolean added = visitedSet.add(hashKey); if (!added) { // already visited same object... return; } } boolean accepted = visitor.accept(object); if (!accepted) { return; } if (object instanceof Collection) { Collection<?> collection = (Collection<?>) object; for (Object element : collection) { visitObjectRecursive(element, visitor, visitedSet); } } else if (object instanceof Map) { Map<?, ?> map = (Map<?, ?>) object; // ETOs should only be used as values and not as keys... for (Map.Entry<?, ?> entry : map.entrySet()) { visitObjectRecursive(entry.getKey(), visitor, visitedSet); visitObjectRecursive(entry.getValue(), visitor, visitedSet); } } else if (object instanceof Object[]) { Object[] array = (Object[]) object; for (Object element : array) { visitObjectRecursive(element, visitor, visitedSet); } } else if (object instanceof Type) { // we do not traverse types (Class, ParameterizedType, TypeVariable, ...) return; } else { Class<?> objectClass = object.getClass(); if (objectClass.isArray()) { return; } PojoDescriptor<?> descriptor = this.pojoDescriptorBuilder.getDescriptor(objectClass); Collection<? extends PojoPropertyDescriptor> propertyDescriptors = descriptor.getPropertyDescriptors(); for (PojoPropertyDescriptor propertyDescriptor : propertyDescriptors) { if (!"class".equals(propertyDescriptor.getName())) { PojoPropertyAccessorNonArg getter = propertyDescriptor.getAccessor(PojoPropertyAccessorNonArgMode.GET); if (getter != null) { Object propertyValue = getter.invoke(object); visitObjectRecursive(propertyValue, visitor, visitedSet); } } } } } }
public class class_name { protected void visitObjectRecursive(Object object, Filter<Object> visitor, Set<HashKey<Object>> visitedSet) { if (object == null) { return; // depends on control dependency: [if], data = [none] } if (visitedSet != null) { HashKey<Object> hashKey = new HashKey<>(object); boolean added = visitedSet.add(hashKey); if (!added) { // already visited same object... return; // depends on control dependency: [if], data = [none] } } boolean accepted = visitor.accept(object); if (!accepted) { return; // depends on control dependency: [if], data = [none] } if (object instanceof Collection) { Collection<?> collection = (Collection<?>) object; for (Object element : collection) { visitObjectRecursive(element, visitor, visitedSet); // depends on control dependency: [for], data = [element] } } else if (object instanceof Map) { Map<?, ?> map = (Map<?, ?>) object; // depends on control dependency: [if], data = [none] // ETOs should only be used as values and not as keys... for (Map.Entry<?, ?> entry : map.entrySet()) { visitObjectRecursive(entry.getKey(), visitor, visitedSet); // depends on control dependency: [for], data = [entry] visitObjectRecursive(entry.getValue(), visitor, visitedSet); // depends on control dependency: [for], data = [entry] } } else if (object instanceof Object[]) { Object[] array = (Object[]) object; for (Object element : array) { visitObjectRecursive(element, visitor, visitedSet); // depends on control dependency: [for], data = [element] } } else if (object instanceof Type) { // we do not traverse types (Class, ParameterizedType, TypeVariable, ...) return; // depends on control dependency: [if], data = [none] } else { Class<?> objectClass = object.getClass(); if (objectClass.isArray()) { return; // depends on control dependency: [if], data = [none] } PojoDescriptor<?> descriptor = this.pojoDescriptorBuilder.getDescriptor(objectClass); Collection<? 
extends PojoPropertyDescriptor> propertyDescriptors = descriptor.getPropertyDescriptors(); // depends on control dependency: [if], data = [none] for (PojoPropertyDescriptor propertyDescriptor : propertyDescriptors) { if (!"class".equals(propertyDescriptor.getName())) { PojoPropertyAccessorNonArg getter = propertyDescriptor.getAccessor(PojoPropertyAccessorNonArgMode.GET); if (getter != null) { Object propertyValue = getter.invoke(object); visitObjectRecursive(propertyValue, visitor, visitedSet); // depends on control dependency: [if], data = [none] } } } } } }
public class class_name { public void marshall(DeleteTagsRequest deleteTagsRequest, ProtocolMarshaller protocolMarshaller) { if (deleteTagsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteTagsRequest.getConfigurationIds(), CONFIGURATIONIDS_BINDING); protocolMarshaller.marshall(deleteTagsRequest.getTags(), TAGS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DeleteTagsRequest deleteTagsRequest, ProtocolMarshaller protocolMarshaller) { if (deleteTagsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteTagsRequest.getConfigurationIds(), CONFIGURATIONIDS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(deleteTagsRequest.getTags(), TAGS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void marshall(BackupSelectionsListMember backupSelectionsListMember, ProtocolMarshaller protocolMarshaller) { if (backupSelectionsListMember == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(backupSelectionsListMember.getSelectionId(), SELECTIONID_BINDING); protocolMarshaller.marshall(backupSelectionsListMember.getSelectionName(), SELECTIONNAME_BINDING); protocolMarshaller.marshall(backupSelectionsListMember.getBackupPlanId(), BACKUPPLANID_BINDING); protocolMarshaller.marshall(backupSelectionsListMember.getCreationDate(), CREATIONDATE_BINDING); protocolMarshaller.marshall(backupSelectionsListMember.getCreatorRequestId(), CREATORREQUESTID_BINDING); protocolMarshaller.marshall(backupSelectionsListMember.getIamRoleArn(), IAMROLEARN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(BackupSelectionsListMember backupSelectionsListMember, ProtocolMarshaller protocolMarshaller) { if (backupSelectionsListMember == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(backupSelectionsListMember.getSelectionId(), SELECTIONID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(backupSelectionsListMember.getSelectionName(), SELECTIONNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(backupSelectionsListMember.getBackupPlanId(), BACKUPPLANID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(backupSelectionsListMember.getCreationDate(), CREATIONDATE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(backupSelectionsListMember.getCreatorRequestId(), CREATORREQUESTID_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(backupSelectionsListMember.getIamRoleArn(), IAMROLEARN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void updateStep(final String uuid, final Consumer<StepResult> update) { final Optional<StepResult> found = storage.getStep(uuid); if (!found.isPresent()) { LOGGER.error("Could not update step: step with uuid {} not found", uuid); return; } final StepResult step = found.get(); notifier.beforeStepUpdate(step); update.accept(step); notifier.afterStepUpdate(step); } }
public class class_name { public void updateStep(final String uuid, final Consumer<StepResult> update) { final Optional<StepResult> found = storage.getStep(uuid); if (!found.isPresent()) { LOGGER.error("Could not update step: step with uuid {} not found", uuid); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } final StepResult step = found.get(); notifier.beforeStepUpdate(step); update.accept(step); notifier.afterStepUpdate(step); } }
public class class_name { public QueryBuilder addParentIds(final Collection<Integer> ids) { if (ids != null) { parentIds.addAll(ids); } return this; } }
public class class_name { public QueryBuilder addParentIds(final Collection<Integer> ids) { if (ids != null) { parentIds.addAll(ids); // depends on control dependency: [if], data = [(ids] } return this; } }
public class class_name { public void registerHandler(String method, String url, OphApiHandler handler) { if (mtdHandler == null) mtdHandler = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); TreeMap<String, OphApiHandler> reg; if (method == null) method = "ALL"; reg = mtdHandler.get(method); if (reg == null) { reg = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); mtdHandler.put(method, reg); } reg.put(url, handler); } }
public class class_name { public void registerHandler(String method, String url, OphApiHandler handler) { if (mtdHandler == null) mtdHandler = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); TreeMap<String, OphApiHandler> reg; if (method == null) method = "ALL"; reg = mtdHandler.get(method); if (reg == null) { reg = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); // depends on control dependency: [if], data = [none] mtdHandler.put(method, reg); // depends on control dependency: [if], data = [none] } reg.put(url, handler); } }
public class class_name { protected final void sendElement() { if (connection == null) return; try { connection.sendMessage(getElement()); } catch (IOException e) { e.printStackTrace(); } } }
public class class_name { protected final void sendElement() { if (connection == null) return; try { connection.sendMessage(getElement()); // depends on control dependency: [try], data = [none] } catch (IOException e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void parseCamundaOutputParameters(Element inputOutputElement, IoMapping ioMapping) { List<Element> outputParameters = inputOutputElement.elementsNS(BpmnParse.CAMUNDA_BPMN_EXTENSIONS_NS, "outputParameter"); for (Element outputParameterElement : outputParameters) { parseOutputParameterElement(outputParameterElement, ioMapping); } } }
public class class_name { public static void parseCamundaOutputParameters(Element inputOutputElement, IoMapping ioMapping) { List<Element> outputParameters = inputOutputElement.elementsNS(BpmnParse.CAMUNDA_BPMN_EXTENSIONS_NS, "outputParameter"); for (Element outputParameterElement : outputParameters) { parseOutputParameterElement(outputParameterElement, ioMapping); // depends on control dependency: [for], data = [outputParameterElement] } } }
public class class_name { public void validate(String origin) { List<String> problems = new ArrayList<>(); if(source == null) { if(path == null) { problems.add("You must specify either path or source"); } else if(method == null) { problems.add("You must specify method along with path: " + path); } } else { if(path != null) { problems.add("Conflicting source: " + source + " and path: " + path); } if(method != null) { problems.add("Conflicting source: " + source + " and method: " + method); } } if(method != null && !Util.METHODS.contains(method.toUpperCase())) { problems.add("Invalid HTTP method: " + method); } if(!problems.isEmpty()) { throw new RuntimeException("Bad paths element in " + origin + " [ " + String.join(" | ", problems) + " ]"); } } }
public class class_name { public void validate(String origin) { List<String> problems = new ArrayList<>(); if(source == null) { if(path == null) { problems.add("You must specify either path or source"); // depends on control dependency: [if], data = [none] } else if(method == null) { problems.add("You must specify method along with path: " + path); // depends on control dependency: [if], data = [none] } } else { if(path != null) { problems.add("Conflicting source: " + source + " and path: " + path); // depends on control dependency: [if], data = [none] } if(method != null) { problems.add("Conflicting source: " + source + " and method: " + method); // depends on control dependency: [if], data = [none] } } if(method != null && !Util.METHODS.contains(method.toUpperCase())) { problems.add("Invalid HTTP method: " + method); // depends on control dependency: [if], data = [none] } if(!problems.isEmpty()) { throw new RuntimeException("Bad paths element in " + origin + " [ " + String.join(" | ", problems) + " ]"); } } }
public class class_name { public void readDependencies(CmsObject cms) { try { // read all resources in the parent folder of the published resource List<CmsResource> folderContent = cms.getResourcesInFolder( CmsResource.getParentFolder(cms.getRequestContext().removeSiteRoot(getResource().getRootPath())), CmsResourceFilter.DEFAULT); // now calculate the dependencies form the folder content that has been read readDependencies(cms, folderContent); } catch (CmsException e) { LOG.warn("Unable to read dependencies for " + getResource().getRootPath(), e); } } }
public class class_name { public void readDependencies(CmsObject cms) { try { // read all resources in the parent folder of the published resource List<CmsResource> folderContent = cms.getResourcesInFolder( CmsResource.getParentFolder(cms.getRequestContext().removeSiteRoot(getResource().getRootPath())), CmsResourceFilter.DEFAULT); // now calculate the dependencies form the folder content that has been read readDependencies(cms, folderContent); // depends on control dependency: [try], data = [none] } catch (CmsException e) { LOG.warn("Unable to read dependencies for " + getResource().getRootPath(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void loop(MastersSlavesListener listener, final GlobalStateInfo globalInfo, final List<HostAddress> addresses, SearchFilter searchFilter) throws SQLException { MastersSlavesProtocol protocol; ArrayDeque<HostAddress> loopAddresses = new ArrayDeque<>(addresses); if (loopAddresses.isEmpty()) { resetHostList(listener, loopAddresses); } int maxConnectionTry = listener.getRetriesAllDown(); SQLException lastQueryException = null; boolean firstLoop = true; while (!loopAddresses.isEmpty() || (!searchFilter.isFailoverLoop() && maxConnectionTry > 0)) { protocol = getNewProtocol(listener.getProxy(), globalInfo, listener.getUrlParser()); if (listener.isExplicitClosed() || (!listener.isSecondaryHostFailReconnect() && !listener .isMasterHostFailReconnect())) { return; } maxConnectionTry--; try { HostAddress host = loopAddresses.pollFirst(); if (host == null) { loopAddresses.addAll(listener.getUrlParser().getHostAddresses()); host = loopAddresses.pollFirst(); } protocol.setHostAddress(host); protocol.connect(); if (listener.isExplicitClosed()) { protocol.close(); return; } listener.removeFromBlacklist(protocol.getHostAddress()); if (listener.isMasterHostFailReconnect() && protocol.isMasterConnection()) { if (foundMaster(listener, protocol, searchFilter)) { return; } } else if (listener.isSecondaryHostFailReconnect() && !protocol.isMasterConnection()) { if (foundSecondary(listener, protocol, searchFilter)) { return; } } else { protocol.close(); } } catch (SQLException e) { lastQueryException = e; listener.addToBlacklist(protocol.getHostAddress()); } if (!listener.isMasterHostFailReconnect() && !listener.isSecondaryHostFailReconnect()) { return; } //in case master not found but slave is , and allowing master down if (loopAddresses.isEmpty() && (listener.isMasterHostFailReconnect() && listener.urlParser.getOptions().allowMasterDownConnection && !listener.isSecondaryHostFailReconnect())) { return; } //on connection and all slaves have been 
tested, use master if on if (loopAddresses.isEmpty() && searchFilter.isInitialConnection() && !listener.isMasterHostFailReconnect()) { return; } // if server has try to connect to all host, and there is remaining master or slave that fail // add all servers back to continue looping until maxConnectionTry is reached if (loopAddresses.isEmpty() && !searchFilter.isFailoverLoop() && maxConnectionTry > 0) { resetHostList(listener, loopAddresses); if (firstLoop) { firstLoop = false; } else { try { //wait 250ms before looping through all connection another time Thread.sleep(250); } catch (InterruptedException interrupted) { //interrupted, continue } } } } if (listener.isMasterHostFailReconnect() || listener.isSecondaryHostFailReconnect()) { String error = "No active connection found for replica"; if (listener.isMasterHostFailReconnect()) { error = "No active connection found for master"; } if (lastQueryException != null) { throw new SQLException(error + " : " + lastQueryException.getMessage(), lastQueryException.getSQLState(), lastQueryException.getErrorCode(), lastQueryException); } throw new SQLException(error); } } }
public class class_name { public static void loop(MastersSlavesListener listener, final GlobalStateInfo globalInfo, final List<HostAddress> addresses, SearchFilter searchFilter) throws SQLException { MastersSlavesProtocol protocol; ArrayDeque<HostAddress> loopAddresses = new ArrayDeque<>(addresses); if (loopAddresses.isEmpty()) { resetHostList(listener, loopAddresses); } int maxConnectionTry = listener.getRetriesAllDown(); SQLException lastQueryException = null; boolean firstLoop = true; while (!loopAddresses.isEmpty() || (!searchFilter.isFailoverLoop() && maxConnectionTry > 0)) { protocol = getNewProtocol(listener.getProxy(), globalInfo, listener.getUrlParser()); if (listener.isExplicitClosed() || (!listener.isSecondaryHostFailReconnect() && !listener .isMasterHostFailReconnect())) { return; // depends on control dependency: [if], data = [none] } maxConnectionTry--; try { HostAddress host = loopAddresses.pollFirst(); if (host == null) { loopAddresses.addAll(listener.getUrlParser().getHostAddresses()); // depends on control dependency: [if], data = [none] host = loopAddresses.pollFirst(); // depends on control dependency: [if], data = [none] } protocol.setHostAddress(host); // depends on control dependency: [try], data = [none] protocol.connect(); // depends on control dependency: [try], data = [none] if (listener.isExplicitClosed()) { protocol.close(); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } listener.removeFromBlacklist(protocol.getHostAddress()); // depends on control dependency: [try], data = [none] if (listener.isMasterHostFailReconnect() && protocol.isMasterConnection()) { if (foundMaster(listener, protocol, searchFilter)) { return; // depends on control dependency: [if], data = [none] } } else if (listener.isSecondaryHostFailReconnect() && !protocol.isMasterConnection()) { if (foundSecondary(listener, protocol, searchFilter)) { return; // depends on control dependency: [if], data = 
[none] } } else { protocol.close(); // depends on control dependency: [if], data = [none] } } catch (SQLException e) { lastQueryException = e; listener.addToBlacklist(protocol.getHostAddress()); } // depends on control dependency: [catch], data = [none] if (!listener.isMasterHostFailReconnect() && !listener.isSecondaryHostFailReconnect()) { return; // depends on control dependency: [if], data = [none] } //in case master not found but slave is , and allowing master down if (loopAddresses.isEmpty() && (listener.isMasterHostFailReconnect() && listener.urlParser.getOptions().allowMasterDownConnection && !listener.isSecondaryHostFailReconnect())) { return; // depends on control dependency: [if], data = [none] } //on connection and all slaves have been tested, use master if on if (loopAddresses.isEmpty() && searchFilter.isInitialConnection() && !listener.isMasterHostFailReconnect()) { return; // depends on control dependency: [if], data = [none] } // if server has try to connect to all host, and there is remaining master or slave that fail // add all servers back to continue looping until maxConnectionTry is reached if (loopAddresses.isEmpty() && !searchFilter.isFailoverLoop() && maxConnectionTry > 0) { resetHostList(listener, loopAddresses); // depends on control dependency: [if], data = [none] if (firstLoop) { firstLoop = false; // depends on control dependency: [if], data = [none] } else { try { //wait 250ms before looping through all connection another time Thread.sleep(250); // depends on control dependency: [try], data = [none] } catch (InterruptedException interrupted) { //interrupted, continue } // depends on control dependency: [catch], data = [none] } } } if (listener.isMasterHostFailReconnect() || listener.isSecondaryHostFailReconnect()) { String error = "No active connection found for replica"; if (listener.isMasterHostFailReconnect()) { error = "No active connection found for master"; // depends on control dependency: [if], data = [none] } if 
(lastQueryException != null) { throw new SQLException(error + " : " + lastQueryException.getMessage(), lastQueryException.getSQLState(), lastQueryException.getErrorCode(), lastQueryException); } throw new SQLException(error); } } }
public class class_name { protected AccessControlGroup group(String groupId, List<AccessControlGroup> inherits, String... permissionIds) { AccessControlGroup group = new AccessControlGroup(groupId); group.setInherits(inherits); List<AccessControlPermission> permissions = new ArrayList<>(permissionIds.length); for (String permissionId : permissionIds) { permissions.add(permission(permissionId)); } group.setPermissions(permissions); addAccessControl(group); return group; } }
public class class_name { protected AccessControlGroup group(String groupId, List<AccessControlGroup> inherits, String... permissionIds) { AccessControlGroup group = new AccessControlGroup(groupId); group.setInherits(inherits); List<AccessControlPermission> permissions = new ArrayList<>(permissionIds.length); for (String permissionId : permissionIds) { permissions.add(permission(permissionId)); // depends on control dependency: [for], data = [permissionId] } group.setPermissions(permissions); addAccessControl(group); return group; } }
public class class_name { void setPolicyInfo(Collection<PolicyInfo> all) throws IOException { this.all = all; this.pathToPolicy.clear(); // keep a reverse map from all top-level paths to policies for (PolicyInfo pinfo: all) { pathToPolicy.add(new PathToPolicy(pinfo.getSrcPath(), pinfo)); for (PathInfo d:pinfo.getDestPaths()) { pathToPolicy.add(new PathToPolicy(d.rpath, pinfo)); } } // keep all paths sorted in revere lexicographical order so that // we longest path is first. Comparator<PathToPolicy> comp = new Comparator<PathToPolicy>() { public int compare(PathToPolicy p1, PathToPolicy p2) { return 0 - p1.spath.compareTo(p2.spath); } }; Collections.sort(pathToPolicy, comp); } }
public class class_name { void setPolicyInfo(Collection<PolicyInfo> all) throws IOException { this.all = all; this.pathToPolicy.clear(); // keep a reverse map from all top-level paths to policies for (PolicyInfo pinfo: all) { pathToPolicy.add(new PathToPolicy(pinfo.getSrcPath(), pinfo)); for (PathInfo d:pinfo.getDestPaths()) { pathToPolicy.add(new PathToPolicy(d.rpath, pinfo)); // depends on control dependency: [for], data = [d] } } // keep all paths sorted in revere lexicographical order so that // we longest path is first. Comparator<PathToPolicy> comp = new Comparator<PathToPolicy>() { public int compare(PathToPolicy p1, PathToPolicy p2) { return 0 - p1.spath.compareTo(p2.spath); } }; Collections.sort(pathToPolicy, comp); } }
public class class_name { private Entry<C> getFromEitherMap(String key) { Entry<C> entry = liveMap.get(key); if (entry != null) return entry; else { return lingerersMap.get(key); } } }
public class class_name { private Entry<C> getFromEitherMap(String key) { Entry<C> entry = liveMap.get(key); if (entry != null) return entry; else { return lingerersMap.get(key); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static Object convertFromJsonValue(JsonValue value) { if (value == null) { return null; } else if (value.isNumber()) { if (value.toString().contains(".")) { return value.asDouble(); } else { return value.asLong(); } } else if (value.isBoolean()) { return value.asBoolean(); } else if (value.isNull()) { return null; } else if (value.isString()) { return value.asString(); } else if (value == NonTerminalJsonValue.INSTANCE) { return value; } throw new IllegalArgumentException("Unknown Json type: " + value); } }
public class class_name { public static Object convertFromJsonValue(JsonValue value) { if (value == null) { return null; // depends on control dependency: [if], data = [none] } else if (value.isNumber()) { if (value.toString().contains(".")) { return value.asDouble(); // depends on control dependency: [if], data = [none] } else { return value.asLong(); // depends on control dependency: [if], data = [none] } } else if (value.isBoolean()) { return value.asBoolean(); // depends on control dependency: [if], data = [none] } else if (value.isNull()) { return null; // depends on control dependency: [if], data = [none] } else if (value.isString()) { return value.asString(); // depends on control dependency: [if], data = [none] } else if (value == NonTerminalJsonValue.INSTANCE) { return value; // depends on control dependency: [if], data = [none] } throw new IllegalArgumentException("Unknown Json type: " + value); } }
public class class_name { @GuardedBy("lock") private List<Long> getLedgerIdsToDelete(LogMetadata oldMetadata, LogMetadata currentMetadata) { if (oldMetadata == null) { return Collections.emptyList(); } val existingIds = currentMetadata.getLedgers().stream() .map(LedgerMetadata::getLedgerId) .collect(Collectors.toSet()); return oldMetadata.getLedgers().stream() .map(LedgerMetadata::getLedgerId) .filter(id -> !existingIds.contains(id)) .collect(Collectors.toList()); } }
public class class_name { @GuardedBy("lock") private List<Long> getLedgerIdsToDelete(LogMetadata oldMetadata, LogMetadata currentMetadata) { if (oldMetadata == null) { return Collections.emptyList(); // depends on control dependency: [if], data = [none] } val existingIds = currentMetadata.getLedgers().stream() .map(LedgerMetadata::getLedgerId) .collect(Collectors.toSet()); return oldMetadata.getLedgers().stream() .map(LedgerMetadata::getLedgerId) .filter(id -> !existingIds.contains(id)) .collect(Collectors.toList()); } }
public class class_name { CipherSuiteList getActiveCipherSuites() { if (activeCipherSuites == null) { if (activeProtocols == null) { activeProtocols = getActiveProtocols(); } ArrayList<CipherSuite> suites = new ArrayList<>(); if (!(activeProtocols.collection().isEmpty()) && activeProtocols.min.v != ProtocolVersion.NONE.v) { for (CipherSuite suite : enabledCipherSuites.collection()) { if (suite.obsoleted > activeProtocols.min.v && suite.supported <= activeProtocols.max.v) { if (algorithmConstraints.permits( EnumSet.of(CryptoPrimitive.KEY_AGREEMENT), suite.name, null)) { suites.add(suite); } } else if (debug != null && Debug.isOn("verbose")) { if (suite.obsoleted <= activeProtocols.min.v) { System.out.println( "Ignoring obsoleted cipher suite: " + suite); } else { System.out.println( "Ignoring unsupported cipher suite: " + suite); } } } } activeCipherSuites = new CipherSuiteList(suites); } return activeCipherSuites; } }
public class class_name { CipherSuiteList getActiveCipherSuites() { if (activeCipherSuites == null) { if (activeProtocols == null) { activeProtocols = getActiveProtocols(); // depends on control dependency: [if], data = [none] } ArrayList<CipherSuite> suites = new ArrayList<>(); if (!(activeProtocols.collection().isEmpty()) && activeProtocols.min.v != ProtocolVersion.NONE.v) { for (CipherSuite suite : enabledCipherSuites.collection()) { if (suite.obsoleted > activeProtocols.min.v && suite.supported <= activeProtocols.max.v) { if (algorithmConstraints.permits( EnumSet.of(CryptoPrimitive.KEY_AGREEMENT), suite.name, null)) { suites.add(suite); // depends on control dependency: [if], data = [none] } } else if (debug != null && Debug.isOn("verbose")) { if (suite.obsoleted <= activeProtocols.min.v) { System.out.println( "Ignoring obsoleted cipher suite: " + suite); // depends on control dependency: [if], data = [none] } else { System.out.println( "Ignoring unsupported cipher suite: " + suite); // depends on control dependency: [if], data = [none] } } } } activeCipherSuites = new CipherSuiteList(suites); // depends on control dependency: [if], data = [none] } return activeCipherSuites; } }
public class class_name { public SpringApplicationBuilder parent(Class<?>... sources) { if (this.parent == null) { this.parent = new SpringApplicationBuilder(sources) .web(WebApplicationType.NONE).properties(this.defaultProperties) .environment(this.environment); } else { this.parent.sources(sources); } return this.parent; } }
public class class_name { public SpringApplicationBuilder parent(Class<?>... sources) { if (this.parent == null) { this.parent = new SpringApplicationBuilder(sources) .web(WebApplicationType.NONE).properties(this.defaultProperties) .environment(this.environment); // depends on control dependency: [if], data = [none] } else { this.parent.sources(sources); // depends on control dependency: [if], data = [none] } return this.parent; } }
public class class_name { public Rules add(PropertyConstraint constraint) { CompoundPropertyConstraint and = (CompoundPropertyConstraint)propertiesConstraints.get(constraint .getPropertyName()); if (and == null) { putPropertyConstraint(constraint); } else { and.add(constraint); } return this; } }
public class class_name { public Rules add(PropertyConstraint constraint) { CompoundPropertyConstraint and = (CompoundPropertyConstraint)propertiesConstraints.get(constraint .getPropertyName()); if (and == null) { putPropertyConstraint(constraint); // depends on control dependency: [if], data = [none] } else { and.add(constraint); // depends on control dependency: [if], data = [none] } return this; } }
public class class_name { public static String decode(String name) { // quick check if (name.indexOf("_x") < 0) { // not encoded return name; } StringBuffer decoded = new StringBuffer(); Matcher m = ENCODE_PATTERN.matcher(name); while (m.find()) { m.appendReplacement(decoded, Character.toString((char)Integer.parseInt(m.group().substring(2, 6), 16))); } m.appendTail(decoded); return decoded.toString(); } }
public class class_name { public static String decode(String name) { // quick check if (name.indexOf("_x") < 0) { // not encoded return name; // depends on control dependency: [if], data = [none] } StringBuffer decoded = new StringBuffer(); Matcher m = ENCODE_PATTERN.matcher(name); while (m.find()) { m.appendReplacement(decoded, Character.toString((char)Integer.parseInt(m.group().substring(2, 6), 16))); // depends on control dependency: [while], data = [none] } m.appendTail(decoded); return decoded.toString(); } }
public class class_name { public static FieldMapping parseFieldMapping(String source, JsonNode mappingNode) { ValidationException.check(mappingNode.isObject(), "A column mapping must be a JSON record"); ValidationException.check(mappingNode.has(TYPE), "Column mappings must have a %s.", TYPE); String type = mappingNode.get(TYPE).asText(); // return easy cases if ("occVersion".equals(type)) { return FieldMapping.version(source); } else if ("key".equals(type)) { return FieldMapping.key(source); } String family = null; String qualifier = null; String prefix = null; // for backward-compatibility, check for "value": "fam:qual" if (mappingNode.has(VALUE)) { // avoids String#split because of odd cases, like ":".split(":") String value = mappingNode.get(VALUE).asText(); Iterator<String> values = VALUE_SPLITTER.split(value).iterator(); if (values.hasNext()) { family = values.next(); } if (values.hasNext()) { if ("keyAsColumn".equals(type)) { prefix = values.next(); if (prefix.isEmpty()) { prefix = null; } } else { qualifier = values.next(); } } } // replace any existing values with explicit family and qualifier if (mappingNode.has(FAMILY)) { family = mappingNode.get(FAMILY).textValue(); } if (mappingNode.has(QUALIFIER)) { qualifier = mappingNode.get(QUALIFIER).textValue(); } if ("column".equals(type)) { ValidationException.check(family != null && !family.isEmpty(), "Column mapping %s must have a %s", source, FAMILY); ValidationException.check(qualifier != null && !qualifier.isEmpty(), "Column mapping %s must have a %s", source, QUALIFIER); return FieldMapping.column(source, family, qualifier); } else if ("keyAsColumn".equals(type)) { ValidationException.check(family != null && !family.isEmpty(), "Column mapping %s must have a %s", source, FAMILY); ValidationException.check(qualifier == null, "Key-as-column mapping %s cannot have a %s", source, QUALIFIER); if (mappingNode.has(PREFIX)) { prefix = mappingNode.get(PREFIX).asText(); if (prefix.isEmpty()) { prefix = null; } } 
return FieldMapping.keyAsColumn(source, family, prefix); } else if ("counter".equals(type)) { ValidationException.check(family != null && !family.isEmpty(), "Counter mapping %s must have a %s", source, FAMILY); ValidationException.check(qualifier != null && !qualifier.isEmpty(), "Counter mapping %s must have a %s", source, QUALIFIER); return FieldMapping.counter(source, family, qualifier); } else { throw new ValidationException("Invalid mapping type: " + type); } } }
public class class_name { public static FieldMapping parseFieldMapping(String source, JsonNode mappingNode) { ValidationException.check(mappingNode.isObject(), "A column mapping must be a JSON record"); ValidationException.check(mappingNode.has(TYPE), "Column mappings must have a %s.", TYPE); String type = mappingNode.get(TYPE).asText(); // return easy cases if ("occVersion".equals(type)) { return FieldMapping.version(source); // depends on control dependency: [if], data = [none] } else if ("key".equals(type)) { return FieldMapping.key(source); // depends on control dependency: [if], data = [none] } String family = null; String qualifier = null; String prefix = null; // for backward-compatibility, check for "value": "fam:qual" if (mappingNode.has(VALUE)) { // avoids String#split because of odd cases, like ":".split(":") String value = mappingNode.get(VALUE).asText(); Iterator<String> values = VALUE_SPLITTER.split(value).iterator(); if (values.hasNext()) { family = values.next(); // depends on control dependency: [if], data = [none] } if (values.hasNext()) { if ("keyAsColumn".equals(type)) { prefix = values.next(); // depends on control dependency: [if], data = [none] if (prefix.isEmpty()) { prefix = null; // depends on control dependency: [if], data = [none] } } else { qualifier = values.next(); // depends on control dependency: [if], data = [none] } } } // replace any existing values with explicit family and qualifier if (mappingNode.has(FAMILY)) { family = mappingNode.get(FAMILY).textValue(); // depends on control dependency: [if], data = [none] } if (mappingNode.has(QUALIFIER)) { qualifier = mappingNode.get(QUALIFIER).textValue(); // depends on control dependency: [if], data = [none] } if ("column".equals(type)) { ValidationException.check(family != null && !family.isEmpty(), "Column mapping %s must have a %s", source, FAMILY); // depends on control dependency: [if], data = [none] ValidationException.check(qualifier != null && !qualifier.isEmpty(), "Column 
mapping %s must have a %s", source, QUALIFIER); // depends on control dependency: [if], data = [none] return FieldMapping.column(source, family, qualifier); // depends on control dependency: [if], data = [none] } else if ("keyAsColumn".equals(type)) { ValidationException.check(family != null && !family.isEmpty(), "Column mapping %s must have a %s", source, FAMILY); // depends on control dependency: [if], data = [none] ValidationException.check(qualifier == null, "Key-as-column mapping %s cannot have a %s", source, QUALIFIER); // depends on control dependency: [if], data = [none] if (mappingNode.has(PREFIX)) { prefix = mappingNode.get(PREFIX).asText(); // depends on control dependency: [if], data = [none] if (prefix.isEmpty()) { prefix = null; // depends on control dependency: [if], data = [none] } } return FieldMapping.keyAsColumn(source, family, prefix); // depends on control dependency: [if], data = [none] } else if ("counter".equals(type)) { ValidationException.check(family != null && !family.isEmpty(), "Counter mapping %s must have a %s", source, FAMILY); // depends on control dependency: [if], data = [none] ValidationException.check(qualifier != null && !qualifier.isEmpty(), "Counter mapping %s must have a %s", source, QUALIFIER); // depends on control dependency: [if], data = [none] return FieldMapping.counter(source, family, qualifier); // depends on control dependency: [if], data = [none] } else { throw new ValidationException("Invalid mapping type: " + type); } } }
public class class_name { public void renderOrderDidChange (AbstractMedia media) { if (_tickStamp > 0) { log.warning("Egads! Render order changed during a tick.", new Exception()); } _media.remove(media); _media.insertSorted(media, RENDER_ORDER); } }
public class class_name { public void renderOrderDidChange (AbstractMedia media) { if (_tickStamp > 0) { log.warning("Egads! Render order changed during a tick.", new Exception()); // depends on control dependency: [if], data = [none] } _media.remove(media); _media.insertSorted(media, RENDER_ORDER); } }
public class class_name { void clearNotifications() { if (m_parentNotificationWidget != null) { // restore the previous notification widget CmsNotification.get().setWidget(m_parentNotificationWidget); } if (m_ownNotificationWidget != null) { // remove the overlay notification widget remove(m_ownNotificationWidget); } } }
public class class_name { void clearNotifications() { if (m_parentNotificationWidget != null) { // restore the previous notification widget CmsNotification.get().setWidget(m_parentNotificationWidget); // depends on control dependency: [if], data = [(m_parentNotificationWidget] } if (m_ownNotificationWidget != null) { // remove the overlay notification widget remove(m_ownNotificationWidget); // depends on control dependency: [if], data = [(m_ownNotificationWidget] } } }
public class class_name { public void loadSystemProperties() { LOGGER.info("Loading properties"); if (getProperty("integration-test") == null) { LOGGER.trace("Setting default properties, if necessary."); final String fcrepoHome = getProperty("fcrepo.home"); final String baseDir = (fcrepoHome == null ? getProperty("user.dir") + SEP + "fcrepo4-data" + SEP : fcrepoHome + SEP); for (final PROPERTIES prop : PROPERTIES.values()) { final String value = getProperty(prop.getValue()); if (value == null) { if (prop.getSetDefaultValue()) { setProperty(prop.getValue(), baseDir); } } else { updateRelativePropertyPath(prop.getValue(), value, baseDir); } } } for (final PROPERTIES prop : PROPERTIES.values()) { final String val = prop.getValue(); LOGGER.info("{} = {}", val, getProperty(val)); } } }
public class class_name { public void loadSystemProperties() { LOGGER.info("Loading properties"); if (getProperty("integration-test") == null) { LOGGER.trace("Setting default properties, if necessary."); // depends on control dependency: [if], data = [none] final String fcrepoHome = getProperty("fcrepo.home"); final String baseDir = (fcrepoHome == null ? getProperty("user.dir") + SEP + "fcrepo4-data" + SEP : fcrepoHome + SEP); for (final PROPERTIES prop : PROPERTIES.values()) { final String value = getProperty(prop.getValue()); if (value == null) { if (prop.getSetDefaultValue()) { setProperty(prop.getValue(), baseDir); // depends on control dependency: [if], data = [none] } } else { updateRelativePropertyPath(prop.getValue(), value, baseDir); // depends on control dependency: [if], data = [none] } } } for (final PROPERTIES prop : PROPERTIES.values()) { final String val = prop.getValue(); LOGGER.info("{} = {}", val, getProperty(val)); // depends on control dependency: [for], data = [none] } } }
public class class_name { private static int countSpaceSub(char [] dest,int length, char subChar){ int i = 0; int count = 0; while (i < length) { if (dest[i] == subChar) { count++; } i++; } return count; } }
public class class_name { private static int countSpaceSub(char [] dest,int length, char subChar){ int i = 0; int count = 0; while (i < length) { if (dest[i] == subChar) { count++; // depends on control dependency: [if], data = [none] } i++; // depends on control dependency: [while], data = [none] } return count; } }
public class class_name { @Override public List<MetricSchemaRecord> filterRecords(SchemaQuery query) { requireNotDisposed(); _logger.debug(query.toString()); if(query instanceof MetricSchemaRecordQuery) { long start = System.nanoTime(); List<MetricSchemaRecord> result = _schemaService.get(MetricSchemaRecordQuery.class.cast(query)); _logger.debug("Time to filter records in ms: " + (System.nanoTime() - start) / 1000000); return result; } else { long start = System.nanoTime(); List<MetricSchemaRecord> result = _schemaService.keywordSearch(KeywordQuery.class.cast(query)); _logger.debug("Time to filter records in ms: " + (System.nanoTime() - start) / 1000000); return result; } } }
public class class_name { @Override public List<MetricSchemaRecord> filterRecords(SchemaQuery query) { requireNotDisposed(); _logger.debug(query.toString()); if(query instanceof MetricSchemaRecordQuery) { long start = System.nanoTime(); List<MetricSchemaRecord> result = _schemaService.get(MetricSchemaRecordQuery.class.cast(query)); _logger.debug("Time to filter records in ms: " + (System.nanoTime() - start) / 1000000); // depends on control dependency: [if], data = [none] return result; // depends on control dependency: [if], data = [none] } else { long start = System.nanoTime(); List<MetricSchemaRecord> result = _schemaService.keywordSearch(KeywordQuery.class.cast(query)); _logger.debug("Time to filter records in ms: " + (System.nanoTime() - start) / 1000000); // depends on control dependency: [if], data = [none] return result; // depends on control dependency: [if], data = [none] } } }
public class class_name { public synchronized void setState(DebugState newState) { if (!states.contains(newState)) { switch (newState) { case Disconnected: Assert.isLegal(canChange(DebugState.Disconnected), "Cannot disconnect a terminated state"); case Terminated: Assert.isLegal(canChange(DebugState.Terminated), "Cannot terminate a terminated state"); states.clear(); states.add(newState); break; case Suspended: Assert.isLegal(canChange(DebugState.Suspended), "Can only suspend if resumed"); states.remove(DebugState.Resumed); states.add(newState); break; case IsStepping: Assert.isLegal(canChange(DebugState.IsStepping), "Cannot step if not suspended"); states.add(newState); break; case Resumed: Assert.isLegal(canChange(DebugState.Resumed), "Cannot resume in a terminated state"); if (states.contains(DebugState.IsStepping)) { states.clear(); states.add(DebugState.IsStepping); } else { states.clear(); } states.add(newState); break; case Deadlocked: states.add(newState); break; } } } }
public class class_name { public synchronized void setState(DebugState newState) { if (!states.contains(newState)) { switch (newState) { case Disconnected: Assert.isLegal(canChange(DebugState.Disconnected), "Cannot disconnect a terminated state"); case Terminated: Assert.isLegal(canChange(DebugState.Terminated), "Cannot terminate a terminated state"); states.clear(); states.add(newState); break; case Suspended: Assert.isLegal(canChange(DebugState.Suspended), "Can only suspend if resumed"); states.remove(DebugState.Resumed); states.add(newState); break; case IsStepping: Assert.isLegal(canChange(DebugState.IsStepping), "Cannot step if not suspended"); states.add(newState); break; case Resumed: Assert.isLegal(canChange(DebugState.Resumed), "Cannot resume in a terminated state"); if (states.contains(DebugState.IsStepping)) { states.clear(); // depends on control dependency: [if], data = [none] states.add(DebugState.IsStepping); // depends on control dependency: [if], data = [none] } else { states.clear(); // depends on control dependency: [if], data = [none] } states.add(newState); break; case Deadlocked: states.add(newState); break; } } } }
public class class_name { public static NodeSequence slice( final NodeSequence original, Columns columns ) { final int newWidth = columns.getSelectorNames().size(); if (original.width() == newWidth) { return original; } // We need to return a NodeSequence that includes only the specified selectors. // Step 1: figure out which selector indexes we'll use ... final int[] selectorIndexes = new int[newWidth]; int i = 0; for (String selectorName : columns.getSelectorNames()) { selectorIndexes[i++] = columns.getSelectorIndex(selectorName); } // Step 2: create a NodeSequence that delegates to the original but that returns Batch instances that // return the desired indexes ... return new NodeSequence() { @Override public int width() { return 1; } @Override public long getRowCount() { return original.getRowCount(); } @Override public boolean isEmpty() { return original.isEmpty(); } @Override public Batch nextBatch() { return slicingBatch(original.nextBatch(), selectorIndexes); } @Override public void close() { original.close(); } @Override public String toString() { return "(slice width=" + newWidth + " indexes=" + selectorIndexes + " " + original + " )"; } }; } }
public class class_name { public static NodeSequence slice( final NodeSequence original, Columns columns ) { final int newWidth = columns.getSelectorNames().size(); if (original.width() == newWidth) { return original; // depends on control dependency: [if], data = [none] } // We need to return a NodeSequence that includes only the specified selectors. // Step 1: figure out which selector indexes we'll use ... final int[] selectorIndexes = new int[newWidth]; int i = 0; for (String selectorName : columns.getSelectorNames()) { selectorIndexes[i++] = columns.getSelectorIndex(selectorName); // depends on control dependency: [for], data = [selectorName] } // Step 2: create a NodeSequence that delegates to the original but that returns Batch instances that // return the desired indexes ... return new NodeSequence() { @Override public int width() { return 1; } @Override public long getRowCount() { return original.getRowCount(); } @Override public boolean isEmpty() { return original.isEmpty(); } @Override public Batch nextBatch() { return slicingBatch(original.nextBatch(), selectorIndexes); } @Override public void close() { original.close(); } @Override public String toString() { return "(slice width=" + newWidth + " indexes=" + selectorIndexes + " " + original + " )"; } }; } }
public class class_name {
    /**
     * Resolve the current user's home directory, preferring the POSIX HOME
     * environment variable when running under Cygwin and the JVM's
     * {@code user.home} property everywhere else.
     *
     * @return the home directory path, or null if the chosen source is unset
     */
    private static String getUserHome() {
        final boolean onCygwin = platformType == SelfExtractUtils.PlatformType_CYGWIN;
        return onCygwin ? System.getenv("HOME") : System.getProperty("user.home");
    }
}
public class class_name {
    /**
     * Resolve the current user's home directory. Under Cygwin the HOME environment
     * variable is used instead of the JVM's user.home property -- presumably because
     * the JVM property points at the Windows profile there; confirm with callers.
     *
     * @return the home directory path, or null if the chosen source is unset
     */
    private static String getUserHome() {
        String home;
        if (platformType == SelfExtractUtils.PlatformType_CYGWIN) {
            // Cygwin: take the POSIX-style home from the environment.
            home = System.getenv("HOME");
        } else {
            home = System.getProperty("user.home");
        }
        return home;
    }
}
public class class_name { @Override public void write(Iterable<QueryResult> results) { logger.debug("Export to '{}' results {}", graphiteHttpUrl, results); HttpURLConnection urlConnection = null; OutputStreamWriter urlWriter; try { StringBuilder sbUrlWriter = new StringBuilder(""); for (QueryResult result : results) { String msg = metricPathPrefix + result.getName() + " " + result.getValue() + " " + result.getEpoch(TimeUnit.SECONDS) + "\n"; logger.debug("Export '{}'", msg); sbUrlWriter.append(msg); } if (sbUrlWriter.length() > 0) { sbUrlWriter.insert(0, "data="); urlConnection = (HttpURLConnection) graphiteHttpUrl.openConnection(); urlConnection.setRequestMethod("POST"); urlConnection.setDoOutput(true); urlWriter = new OutputStreamWriter(urlConnection.getOutputStream(), Charset.forName("UTF-8")); urlWriter.write(sbUrlWriter.toString()); urlWriter.flush(); IoUtils2.closeQuietly(urlWriter); int responseCode = urlConnection.getResponseCode(); if (responseCode != 200) { logger.warn("Failure {}:'{}' to send result to Graphite HTTP proxy'{}' ", responseCode, urlConnection.getResponseMessage(), graphiteHttpUrl); } if (logger.isTraceEnabled()) { IoUtils2.copy(urlConnection.getInputStream(), System.out); } } } catch (Exception e) { logger.warn("Failure to send result to Graphite HTTP proxy '{}'", graphiteHttpUrl, e); } finally { // Release the connection. if (urlConnection != null) { try { InputStream in = urlConnection.getInputStream(); IoUtils2.copy(in, IoUtils2.nullOutputStream()); IoUtils2.closeQuietly(in); InputStream err = urlConnection.getErrorStream(); if (err != null) { IoUtils2.copy(err, IoUtils2.nullOutputStream()); IoUtils2.closeQuietly(err); } } catch (IOException e) { logger.warn("Exception flushing http connection", e); } } } } }
public class class_name { @Override public void write(Iterable<QueryResult> results) { logger.debug("Export to '{}' results {}", graphiteHttpUrl, results); HttpURLConnection urlConnection = null; OutputStreamWriter urlWriter; try { StringBuilder sbUrlWriter = new StringBuilder(""); for (QueryResult result : results) { String msg = metricPathPrefix + result.getName() + " " + result.getValue() + " " + result.getEpoch(TimeUnit.SECONDS) + "\n"; logger.debug("Export '{}'", msg); // depends on control dependency: [for], data = [none] sbUrlWriter.append(msg); // depends on control dependency: [for], data = [none] } if (sbUrlWriter.length() > 0) { sbUrlWriter.insert(0, "data="); // depends on control dependency: [if], data = [none] urlConnection = (HttpURLConnection) graphiteHttpUrl.openConnection(); // depends on control dependency: [if], data = [none] urlConnection.setRequestMethod("POST"); // depends on control dependency: [if], data = [none] urlConnection.setDoOutput(true); // depends on control dependency: [if], data = [none] urlWriter = new OutputStreamWriter(urlConnection.getOutputStream(), Charset.forName("UTF-8")); // depends on control dependency: [if], data = [none] urlWriter.write(sbUrlWriter.toString()); // depends on control dependency: [if], data = [none] urlWriter.flush(); // depends on control dependency: [if], data = [none] IoUtils2.closeQuietly(urlWriter); // depends on control dependency: [if], data = [none] int responseCode = urlConnection.getResponseCode(); if (responseCode != 200) { logger.warn("Failure {}:'{}' to send result to Graphite HTTP proxy'{}' ", responseCode, urlConnection.getResponseMessage(), graphiteHttpUrl); // depends on control dependency: [if], data = [none] } if (logger.isTraceEnabled()) { IoUtils2.copy(urlConnection.getInputStream(), System.out); // depends on control dependency: [if], data = [none] } } } catch (Exception e) { logger.warn("Failure to send result to Graphite HTTP proxy '{}'", graphiteHttpUrl, e); } finally { // 
depends on control dependency: [catch], data = [none] // Release the connection. if (urlConnection != null) { try { InputStream in = urlConnection.getInputStream(); IoUtils2.copy(in, IoUtils2.nullOutputStream()); // depends on control dependency: [try], data = [none] IoUtils2.closeQuietly(in); // depends on control dependency: [try], data = [none] InputStream err = urlConnection.getErrorStream(); if (err != null) { IoUtils2.copy(err, IoUtils2.nullOutputStream()); // depends on control dependency: [if], data = [(err] IoUtils2.closeQuietly(err); // depends on control dependency: [if], data = [(err] } } catch (IOException e) { logger.warn("Exception flushing http connection", e); } // depends on control dependency: [catch], data = [none] } } } }
public class class_name {
    /**
     * Copy the buffered elements into a single new array of length {@code size}:
     * every buffer before {@code currentBufferIndex} is copied whole, then the
     * first {@code offset} elements of the current buffer.
     *
     * @return a fresh array containing all stored elements in order
     */
    public E[] toArray() {
        @SuppressWarnings("unchecked")
        final E[] result = (E[]) new Object[size];
        if (currentBufferIndex != -1) {
            int written = 0;
            for (int b = 0; b < currentBufferIndex; b++) {
                final int chunk = buffers[b].length;
                System.arraycopy(buffers[b], 0, result, written, chunk);
                written += chunk;
            }
            // The current buffer is only partially filled; copy up to 'offset'.
            System.arraycopy(buffers[currentBufferIndex], 0, result, written, offset);
        }
        return result;
    }
}
public class class_name {
    /**
     * Copy the buffered elements into a single new array of length {@code size}.
     * Full buffers before {@code currentBufferIndex} are copied whole; only the
     * first {@code offset} elements of the current buffer are copied.
     *
     * @return a fresh array containing all stored elements in order
     */
    public E[] toArray() {
        int pos = 0;
        E[] array = (E[]) new Object[size];
        if (currentBufferIndex == -1) {
            // No buffer started yet -- presumably size is 0 in this state; confirm.
            return array;
        }
        for (int i = 0; i < currentBufferIndex; i++) {
            int len = buffers[i].length;
            System.arraycopy(buffers[i], 0, array, pos, len);
            pos += len;
        }
        // The current buffer is only partially filled; copy up to 'offset'.
        System.arraycopy(buffers[currentBufferIndex], 0, array, pos, offset);
        return array;
    }
}
public class class_name {
    /**
     * Normalize the given vector, substituting {@code Vector3d.ZERO} when
     * {@code normalize()} signals a zero-length vector by throwing.
     *
     * @param v the vector to normalize
     * @return the normalized vector, or ZERO for a zero-length input
     */
    public static Vector3d normalizeSafe(Vector3d v) {
        Vector3d normalized;
        try {
            normalized = v.normalize();
        } catch (ArithmeticException zeroLength) {
            // Zero-length vectors cannot be normalized; fall back to ZERO.
            normalized = Vector3d.ZERO;
        }
        return normalized;
    }
}
public class class_name {
    /**
     * Normalize the given vector, returning {@code Vector3d.ZERO} instead of
     * propagating the ArithmeticException thrown for a zero-length vector.
     *
     * @param v the vector to normalize
     * @return the normalized vector, or ZERO for a zero-length input
     */
    public static Vector3d normalizeSafe(Vector3d v) {
        try {
            return v.normalize();
        } catch (ArithmeticException ex) {
            // normalize() throws on a zero-length vector; map that to ZERO.
            return Vector3d.ZERO;
        }
    }
}
public class class_name {
    /**
     * Total number of parameter values across all parameter arrays in {@code params}.
     *
     * @return the sum of {@code length()} over every INDArray value
     */
    public long numParams() {
        // Fix: accumulate in a long. The previous 'int ret' combined with
        // 'ret += entry.getValue().length()' silently narrowed each long length
        // to int (compound assignment hides the cast) and could overflow for
        // large models, even though this method declares a long return type.
        long total = 0L;
        for (Map.Entry<String, INDArray> entry : params.entrySet()) {
            total += entry.getValue().length();
        }
        return total;
    }
}
public class class_name {
    /**
     * Total number of parameter values across all parameter arrays in {@code params}.
     *
     * NOTE(review): accumulates into an int even though this method returns long
     * and length() appears to yield long values -- the compound assignment
     * narrows silently, so the total can truncate/overflow for very large
     * models. Confirm whether totals can exceed Integer.MAX_VALUE.
     *
     * @return the sum of {@code length()} over every INDArray value
     */
    public long numParams() {
        int ret = 0;
        for (Map.Entry<String, INDArray> entry : params.entrySet()) {
            ret += entry.getValue().length();
        }
        return ret;
    }
}
public class class_name {
    /**
     * Build the English ordinal word form of {@code n} (0 yields "zeroth").
     * Thousands groups are emitted via cardinalGroup(); the final group is
     * rendered by cardinal999() in ordinal form, or "th" is appended when the
     * number is an exact multiple of one thousand.
     *
     * @param n the number to spell out; negative values are prefixed "negative "
     * @return the ordinal words for {@code n}
     */
    public String ordinal(long n) {
        if (n == 0) {
            return "zeroth";
        }
        StringBuilder words = new StringBuilder(20);
        if (n < 0) {
            // NOTE(review): n = -n overflows for Long.MIN_VALUE (unchanged from the
            // original) -- confirm that input is not expected.
            words.append("negative ");
            n = -n;
        }
        final long[] scales = {
            1000000000000000000L, 1000000000000000L, 1000000000000L,
            1000000000L, 1000000L, 1000L
        };
        final String[] scaleNames = {
            "quintillion", "quadrillion", "trillion", "billion", "million", "thousand"
        };
        for (int g = 0; g < scales.length; g++) {
            n = cardinalGroup(words, n, scales[g], scaleNames[g]);
        }
        if (n == 0) {
            // Exact multiple of 1000: the last group already ended in a cardinal word.
            words.append("th");
        } else {
            cardinal999(words, n, true);
        }
        return words.toString();
    }
}
public class class_name {
    /**
     * Build the English ordinal word form of {@code n} (0 yields "zeroth").
     * Thousands groups are emitted via cardinalGroup(); the final group is
     * rendered by cardinal999() in ordinal form, or "th" is appended when the
     * number is an exact multiple of one thousand.
     *
     * NOTE(review): n = -n overflows for Long.MIN_VALUE -- confirm that input is
     * not expected.
     *
     * @param n the number to spell out; negative values are prefixed "negative "
     * @return the ordinal words for {@code n}
     */
    public String ordinal(long n) {
        if (n == 0) {
            return "zeroth";
        }
        StringBuilder buffer = new StringBuilder(20);
        if (n < 0) {
            buffer.append("negative ");
            n = -n;
        }
        n = cardinalGroup(buffer, n, 1000000000000000000L, "quintillion");
        n = cardinalGroup(buffer, n, 1000000000000000L, "quadrillion");
        n = cardinalGroup(buffer, n, 1000000000000L, "trillion");
        n = cardinalGroup(buffer, n, 1000000000L, "billion");
        n = cardinalGroup(buffer, n, 1000000L, "million");
        n = cardinalGroup(buffer, n, 1000L, "thousand");
        if (n == 0) {
            // Exact multiple of 1000: the last group already ended in a cardinal word.
            buffer.append("th");
        } else {
            cardinal999(buffer, n, true);
        }
        return buffer.toString();
    }
}
public class class_name { public static Map<IProject, List<WorkItem>> getResourcesPerProject(IStructuredSelection structuredSelection) { Map<IProject, List<WorkItem>> projectsMap = new HashMap<>(); for (Iterator<?> iter = structuredSelection.iterator(); iter.hasNext();) { Object element = iter.next(); WorkItem workItem = getWorkItem(element); if (workItem == null) { IWorkingSet wset = Util.getAdapter(IWorkingSet.class, element); if (wset != null) { mapResources(wset, projectsMap); continue; } // Support for active changesets ChangeSet set = Util.getAdapter(ChangeSet.class, element); for (WorkItem change : getResources(set)) { mapResource(change, projectsMap, true); } continue; } mapResource(workItem, projectsMap, false); } return projectsMap; } }
public class class_name {
    /**
     * Group the work items found in the given selection by their containing
     * project. Plain elements adapt directly to a WorkItem; working sets and
     * (active) change sets are expanded into their member resources.
     *
     * @param structuredSelection the selection to examine
     * @return a map from project to the work items selected within it
     */
    public static Map<IProject, List<WorkItem>> getResourcesPerProject(IStructuredSelection structuredSelection) {
        Map<IProject, List<WorkItem>> projectsMap = new HashMap<>();
        for (Iterator<?> iter = structuredSelection.iterator(); iter.hasNext();) {
            Object element = iter.next();
            WorkItem workItem = getWorkItem(element);
            if (workItem == null) {
                // Not a plain work item -- try a working set first ...
                IWorkingSet wset = Util.getAdapter(IWorkingSet.class, element);
                if (wset != null) {
                    mapResources(wset, projectsMap);
                    continue;
                }
                // Support for active changesets
                ChangeSet set = Util.getAdapter(ChangeSet.class, element);
                for (WorkItem change : getResources(set)) {
                    mapResource(change, projectsMap, true);
                }
                continue;
            }
            mapResource(workItem, projectsMap, false);
        }
        return projectsMap;
    }
}
public class class_name {
    /**
     * Refresh the pose from the current bone scene objects: for every attached
     * bone whose rotation is not locked, copy its local model matrix into the
     * pose, then sync the pose and propagate it via updateBonePose().
     */
    public void poseFromBones() {
        for (int boneId = 0; boneId < getNumBones(); ++boneId) {
            GVRSceneObject bone = mBones[boneId];
            // Skip detached bones and bones whose rotation is locked
            // (null check first, preserving the original short-circuit order).
            if ((bone == null) || ((mBoneOptions[boneId] & BONE_LOCK_ROTATION) != 0)) {
                continue;
            }
            GVRTransform transform = bone.getTransform();
            mPose.setLocalMatrix(boneId, transform.getLocalModelMatrix4f());
        }
        mPose.sync();
        updateBonePose();
    }
}
public class class_name {
    /**
     * Refresh the pose from the current bone scene objects: for every attached
     * bone whose rotation is not locked, copy its local model matrix into the
     * pose, then sync the pose and propagate it via updateBonePose().
     */
    public void poseFromBones() {
        for (int i = 0; i < getNumBones(); ++i) {
            GVRSceneObject bone = mBones[i];
            if (bone == null) {
                // No scene object attached for this bone.
                continue;
            }
            if ((mBoneOptions[i] & BONE_LOCK_ROTATION) != 0) {
                // Rotation locked; leave the pose entry untouched.
                continue;
            }
            GVRTransform trans = bone.getTransform();
            mPose.setLocalMatrix(i, trans.getLocalModelMatrix4f());
        }
        mPose.sync();
        updateBonePose();
    }
}