code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { @JsonIgnore public String getEncryptionCertificateDecoded() { if (EncodingUtils.isBase64(encryptionCertificate)) { return EncodingUtils.decodeBase64ToString(encryptionCertificate); } return encryptionCertificate; } }
public class class_name { @JsonIgnore public String getEncryptionCertificateDecoded() { if (EncodingUtils.isBase64(encryptionCertificate)) { return EncodingUtils.decodeBase64ToString(encryptionCertificate); // depends on control dependency: [if], data = [none] } return encryptionCertificate; } }
public class class_name { public final void drawParagraph(String[] texts, float dstX, float dstY, float dstZ, float paragraphWidth, ALIGN align) throws IOException { if (LwjgFontUtil.isEmpty(texts)) { return; } String buff = ""; for (String text: texts) { if (!LwjgFontUtil.isEmpty(text)) { buff += "\n"; } buff += text; } drawParagraph(buff, dstX, dstY, dstZ, paragraphWidth, align); } }
public class class_name { public final void drawParagraph(String[] texts, float dstX, float dstY, float dstZ, float paragraphWidth, ALIGN align) throws IOException { if (LwjgFontUtil.isEmpty(texts)) { return; } String buff = ""; for (String text: texts) { if (!LwjgFontUtil.isEmpty(text)) { buff += "\n"; // depends on control dependency: [if], data = [none] } buff += text; } drawParagraph(buff, dstX, dstY, dstZ, paragraphWidth, align); } }
public class class_name { private BufferedImage createBinaryImage(int w, int h, int pixelBits) { int bytesPerRow = w * pixelBits / 8; if ((w * pixelBits % 8) != 0) { bytesPerRow++; } byte[] imageData = new byte[h * bytesPerRow]; IndexColorModel cm = null; switch (pixelBits) { case 1: { cm = new IndexColorModel(pixelBits, lut1Arr.length, lut1Arr, lut1Arr, lut1Arr); break; } case 2: { cm = new IndexColorModel(pixelBits, lut2Arr.length, lut2Arr, lut2Arr, lut2Arr); break; } case 4: { cm = new IndexColorModel(pixelBits, lut4Arr.length, lut4Arr, lut4Arr, lut4Arr); break; } default: { new Exception("Invalid # of bit per pixel").printStackTrace(); } } DataBuffer db = new DataBufferByte(imageData, imageData.length); WritableRaster r = Raster.createPackedRaster(db, w, h, pixelBits, null); return new BufferedImage(cm, r, false, null); } }
public class class_name { private BufferedImage createBinaryImage(int w, int h, int pixelBits) { int bytesPerRow = w * pixelBits / 8; if ((w * pixelBits % 8) != 0) { bytesPerRow++; // depends on control dependency: [if], data = [none] } byte[] imageData = new byte[h * bytesPerRow]; IndexColorModel cm = null; switch (pixelBits) { case 1: { cm = new IndexColorModel(pixelBits, lut1Arr.length, lut1Arr, lut1Arr, lut1Arr); break; } case 2: { cm = new IndexColorModel(pixelBits, lut2Arr.length, lut2Arr, lut2Arr, lut2Arr); break; } case 4: { cm = new IndexColorModel(pixelBits, lut4Arr.length, lut4Arr, lut4Arr, lut4Arr); break; } default: { new Exception("Invalid # of bit per pixel").printStackTrace(); } } DataBuffer db = new DataBufferByte(imageData, imageData.length); WritableRaster r = Raster.createPackedRaster(db, w, h, pixelBits, null); return new BufferedImage(cm, r, false, null); } }
public class class_name { public StrBuilder appendAll(final Iterable<?> iterable) { if (iterable != null) { for (final Object o : iterable) { append(o); } } return this; } }
public class class_name { public StrBuilder appendAll(final Iterable<?> iterable) { if (iterable != null) { for (final Object o : iterable) { append(o); // depends on control dependency: [for], data = [o] } } return this; } }
public class class_name { public boolean check() { String path = StagePathUtils.getMainStem(getPipelineId()); try { byte[] bytes = zookeeper.readData(path); Long nid = ArbitrateConfigUtils.getCurrentNid(); MainStemEventData eventData = JsonUtils.unmarshalFromByte(bytes, MainStemEventData.class); activeData = eventData;// 更新下为最新值 // 检查下nid是否为自己 boolean result = nid.equals(eventData.getNid()); if (!result) { logger.warn("mainstem is running in node[{}] , but not in node[{}]", eventData.getNid(), nid); } return result; } catch (ZkNoNodeException e) { logger.warn("mainstem is not run any in node"); return false; } catch (ZkInterruptedException e) { logger.warn("mainstem check is interrupt"); Thread.interrupted();// 清除interrupt标记 return check(); } catch (ZkException e) { logger.warn("mainstem check is failed"); return false; } } }
public class class_name { public boolean check() { String path = StagePathUtils.getMainStem(getPipelineId()); try { byte[] bytes = zookeeper.readData(path); Long nid = ArbitrateConfigUtils.getCurrentNid(); MainStemEventData eventData = JsonUtils.unmarshalFromByte(bytes, MainStemEventData.class); activeData = eventData;// 更新下为最新值 // depends on control dependency: [try], data = [none] // 检查下nid是否为自己 boolean result = nid.equals(eventData.getNid()); if (!result) { logger.warn("mainstem is running in node[{}] , but not in node[{}]", eventData.getNid(), nid); // depends on control dependency: [if], data = [none] } return result; // depends on control dependency: [try], data = [none] } catch (ZkNoNodeException e) { logger.warn("mainstem is not run any in node"); return false; } catch (ZkInterruptedException e) { // depends on control dependency: [catch], data = [none] logger.warn("mainstem check is interrupt"); Thread.interrupted();// 清除interrupt标记 return check(); } catch (ZkException e) { // depends on control dependency: [catch], data = [none] logger.warn("mainstem check is failed"); return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Set<Association> getAssociationsFromDB(final JAASSystem _jaasSystem) throws EFapsException { final Set<Association> ret = new HashSet<>(); Connection con = null; try { final List<Long> associationIds = new ArrayList<>(); con = Context.getConnection(); Statement stmt = null; try { final StringBuilder cmd = new StringBuilder(); cmd.append("select ").append("ID ") .append("from T_USERASSOC ") .append("where GROUPID in (") .append("select ").append("USERABSTRACTTO ") .append("from V_USERPERSON2GROUP ") .append("where USERABSTRACTFROM =").append(getId()) .append(") and ROLEID in (") .append("select ").append("USERABSTRACTTO ") .append("from V_USERPERSON2ROLE ") .append("where USERABSTRACTFROM =").append(getId()) .append(")"); stmt = con.createStatement(); final ResultSet resultset = stmt.executeQuery(cmd.toString()); while (resultset.next()) { associationIds.add(resultset.getLong(1)); } resultset.close(); } catch (final SQLException e) { throw new EFapsException(getClass(), "getAssociationsFromDB.SQLException", e, getName()); } finally { try { if (stmt != null) { stmt.close(); con.commit(); } } catch (final SQLException e) { throw new EFapsException(getClass(), "getAssociationsFromDB.SQLException", e, getName()); } } for (final Long associationId : associationIds) { final Association association = Association.get(associationId); ret.add(association); } } finally { try { if (con != null && !con.isClosed()) { con.close(); } } catch (final SQLException e) { throw new CacheReloadException("could not read child type ids", e); } } return ret; } }
public class class_name { public Set<Association> getAssociationsFromDB(final JAASSystem _jaasSystem) throws EFapsException { final Set<Association> ret = new HashSet<>(); Connection con = null; try { final List<Long> associationIds = new ArrayList<>(); con = Context.getConnection(); Statement stmt = null; try { final StringBuilder cmd = new StringBuilder(); cmd.append("select ").append("ID ") .append("from T_USERASSOC ") .append("where GROUPID in (") .append("select ").append("USERABSTRACTTO ") .append("from V_USERPERSON2GROUP ") .append("where USERABSTRACTFROM =").append(getId()) .append(") and ROLEID in (") .append("select ").append("USERABSTRACTTO ") .append("from V_USERPERSON2ROLE ") .append("where USERABSTRACTFROM =").append(getId()) .append(")"); // depends on control dependency: [try], data = [none] stmt = con.createStatement(); // depends on control dependency: [try], data = [none] final ResultSet resultset = stmt.executeQuery(cmd.toString()); while (resultset.next()) { associationIds.add(resultset.getLong(1)); // depends on control dependency: [while], data = [none] } resultset.close(); // depends on control dependency: [try], data = [none] } catch (final SQLException e) { throw new EFapsException(getClass(), "getAssociationsFromDB.SQLException", e, getName()); } finally { // depends on control dependency: [catch], data = [none] try { if (stmt != null) { stmt.close(); // depends on control dependency: [if], data = [none] con.commit(); // depends on control dependency: [if], data = [none] } } catch (final SQLException e) { throw new EFapsException(getClass(), "getAssociationsFromDB.SQLException", e, getName()); } // depends on control dependency: [catch], data = [none] } for (final Long associationId : associationIds) { final Association association = Association.get(associationId); ret.add(association); // depends on control dependency: [for], data = [none] } } finally { try { if (con != null && !con.isClosed()) { con.close(); // depends on control 
dependency: [if], data = [none] } } catch (final SQLException e) { throw new CacheReloadException("could not read child type ids", e); } // depends on control dependency: [catch], data = [none] } return ret; } }
public class class_name { public int drainTo(@NotNull byte[] dest, int destOffset, int maxSize) { int s = maxSize; while (hasRemaining()) { ByteBuf buf = bufs[first]; int remaining = buf.readRemaining(); if (s < remaining) { arraycopy(buf.array(), buf.head(), dest, destOffset, s); buf.moveHead(s); return maxSize; } else { arraycopy(buf.array(), buf.head(), dest, destOffset, remaining); buf.recycle(); first = next(first); s -= remaining; destOffset += remaining; } } return maxSize - s; } }
public class class_name { public int drainTo(@NotNull byte[] dest, int destOffset, int maxSize) { int s = maxSize; while (hasRemaining()) { ByteBuf buf = bufs[first]; int remaining = buf.readRemaining(); if (s < remaining) { arraycopy(buf.array(), buf.head(), dest, destOffset, s); // depends on control dependency: [if], data = [none] buf.moveHead(s); // depends on control dependency: [if], data = [(s] return maxSize; // depends on control dependency: [if], data = [none] } else { arraycopy(buf.array(), buf.head(), dest, destOffset, remaining); // depends on control dependency: [if], data = [remaining)] buf.recycle(); // depends on control dependency: [if], data = [none] first = next(first); // depends on control dependency: [if], data = [none] s -= remaining; // depends on control dependency: [if], data = [none] destOffset += remaining; // depends on control dependency: [if], data = [none] } } return maxSize - s; } }
public class class_name { public void addSingularAttribute(String attributeName, SingularAttribute<X, ?> attribute) { if (declaredSingluarAttribs == null) { declaredSingluarAttribs = new HashMap<String, SingularAttribute<X, ?>>(); } declaredSingluarAttribs.put(attributeName, attribute); onValidateAttributeConstraints((Field) attribute.getJavaMember()); onEmbeddableAttribute((Field) attribute.getJavaMember()); } }
public class class_name { public void addSingularAttribute(String attributeName, SingularAttribute<X, ?> attribute) { if (declaredSingluarAttribs == null) { declaredSingluarAttribs = new HashMap<String, SingularAttribute<X, ?>>(); } declaredSingluarAttribs.put(attributeName, attribute); onValidateAttributeConstraints((Field) attribute.getJavaMember()); onEmbeddableAttribute((Field) attribute.getJavaMember()); // depends on control dependency: [if], data = [none] } }
public class class_name { @Override public AttributesTable createAttributesTable(String tableName, String idColumnName, List<AttributesColumn> additionalColumns, List<UserUniqueConstraint<AttributesColumn>> uniqueConstraints) { if (idColumnName == null) { idColumnName = "id"; } List<AttributesColumn> columns = new ArrayList<AttributesColumn>(); columns.add(AttributesColumn.createPrimaryKeyColumn(0, idColumnName)); if (additionalColumns != null) { columns.addAll(additionalColumns); } return createAttributesTable(tableName, columns, uniqueConstraints); } }
public class class_name { @Override public AttributesTable createAttributesTable(String tableName, String idColumnName, List<AttributesColumn> additionalColumns, List<UserUniqueConstraint<AttributesColumn>> uniqueConstraints) { if (idColumnName == null) { idColumnName = "id"; // depends on control dependency: [if], data = [none] } List<AttributesColumn> columns = new ArrayList<AttributesColumn>(); columns.add(AttributesColumn.createPrimaryKeyColumn(0, idColumnName)); if (additionalColumns != null) { columns.addAll(additionalColumns); // depends on control dependency: [if], data = [(additionalColumns] } return createAttributesTable(tableName, columns, uniqueConstraints); } }
public class class_name { public UpdateUserPoolClientRequest withAllowedOAuthFlows(String... allowedOAuthFlows) { if (this.allowedOAuthFlows == null) { setAllowedOAuthFlows(new java.util.ArrayList<String>(allowedOAuthFlows.length)); } for (String ele : allowedOAuthFlows) { this.allowedOAuthFlows.add(ele); } return this; } }
public class class_name { public UpdateUserPoolClientRequest withAllowedOAuthFlows(String... allowedOAuthFlows) { if (this.allowedOAuthFlows == null) { setAllowedOAuthFlows(new java.util.ArrayList<String>(allowedOAuthFlows.length)); // depends on control dependency: [if], data = [none] } for (String ele : allowedOAuthFlows) { this.allowedOAuthFlows.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static NutMap checkPayReturn(String xml, String key) { try { NutMap map = Xmls.asMap(xmls().parse(new InputSource(new StringReader(xml))).getDocumentElement()); return checkPayReturnMap(map, key); } catch (Exception e) { throw Lang.makeThrow("e.wx.pay.re.error : %s", xml); } } }
public class class_name { public static NutMap checkPayReturn(String xml, String key) { try { NutMap map = Xmls.asMap(xmls().parse(new InputSource(new StringReader(xml))).getDocumentElement()); return checkPayReturnMap(map, key); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw Lang.makeThrow("e.wx.pay.re.error : %s", xml); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override protected void initGraphics() { super.initGraphics(); chartEventListener = e -> drawChart(); tile.getChartData().forEach(chartData -> chartData.addChartDataEventListener(chartEventListener)); chartDataListener = c -> { while (c.next()) { if (c.wasAdded()) { c.getAddedSubList().forEach(addedItem -> addedItem.addChartDataEventListener(chartEventListener)); } else if (c.wasRemoved()) { c.getRemoved().forEach(removedItem -> removedItem.removeChartDataEventListener(chartEventListener)); } } drawChart(); }; clickHandler = e -> { System.out.println("clicked"); double x = e.getX(); double y = e.getY(); double startAngle = 90; List<ChartData> dataList = tile.getChartData(); int noOfItems = dataList.size(); double canvasSize = chartCanvas.getWidth(); double radius = canvasSize * 0.5; double innerSpacer = radius * 0.18; double barWidth = (radius - innerSpacer) / tile.getChartData().size(); double max = noOfItems == 0 ? 0 : dataList.stream().max(Comparator.comparingDouble(ChartData::getValue)).get().getValue(); for (int i = 0 ; i < noOfItems ; i++) { ChartData data = dataList.get(i); double value = clamp(0, Double.MAX_VALUE, data.getValue()); double barXY = barWidth * 0.5 + i * barWidth; double barWH = canvasSize - barWidth - (2 * i * barWidth); double angle = value / max * 270.0; double centerX = barXY + barWH * 0.5; double centerY = centerX; boolean hit = Helper.isInRingSegment(x, y, centerX, centerY, (barWH + barWidth) * 0.5, (barWH - barWidth) * 0.5, startAngle, angle); if (hit) { tile.fireTileEvent(new TileEvent(EventType.SELECTED_CHART_DATA, data)); break; } } }; titleText = new Text(); titleText.setFill(tile.getTitleColor()); Helper.enableNode(titleText, !tile.getTitle().isEmpty()); text = new Text(tile.getText()); text.setFill(tile.getTextColor()); Helper.enableNode(text, tile.isTextVisible()); chartCanvas = new Canvas(size * 0.9, tile.isTextVisible() ? 
size * 0.72 : size * 0.795); chartCtx = chartCanvas.getGraphicsContext2D(); getPane().getChildren().addAll(titleText, chartCanvas, text); } }
public class class_name { @Override protected void initGraphics() { super.initGraphics(); chartEventListener = e -> drawChart(); tile.getChartData().forEach(chartData -> chartData.addChartDataEventListener(chartEventListener)); chartDataListener = c -> { while (c.next()) { if (c.wasAdded()) { c.getAddedSubList().forEach(addedItem -> addedItem.addChartDataEventListener(chartEventListener)); // depends on control dependency: [if], data = [none] } else if (c.wasRemoved()) { c.getRemoved().forEach(removedItem -> removedItem.removeChartDataEventListener(chartEventListener)); // depends on control dependency: [if], data = [none] } } drawChart(); }; clickHandler = e -> { System.out.println("clicked"); double x = e.getX(); double y = e.getY(); double startAngle = 90; List<ChartData> dataList = tile.getChartData(); int noOfItems = dataList.size(); double canvasSize = chartCanvas.getWidth(); double radius = canvasSize * 0.5; double innerSpacer = radius * 0.18; double barWidth = (radius - innerSpacer) / tile.getChartData().size(); double max = noOfItems == 0 ? 
0 : dataList.stream().max(Comparator.comparingDouble(ChartData::getValue)).get().getValue(); for (int i = 0 ; i < noOfItems ; i++) { ChartData data = dataList.get(i); double value = clamp(0, Double.MAX_VALUE, data.getValue()); double barXY = barWidth * 0.5 + i * barWidth; double barWH = canvasSize - barWidth - (2 * i * barWidth); double angle = value / max * 270.0; double centerX = barXY + barWH * 0.5; double centerY = centerX; boolean hit = Helper.isInRingSegment(x, y, centerX, centerY, (barWH + barWidth) * 0.5, (barWH - barWidth) * 0.5, startAngle, angle); if (hit) { tile.fireTileEvent(new TileEvent(EventType.SELECTED_CHART_DATA, data)); break; } } }; titleText = new Text(); titleText.setFill(tile.getTitleColor()); Helper.enableNode(titleText, !tile.getTitle().isEmpty()); text = new Text(tile.getText()); text.setFill(tile.getTextColor()); Helper.enableNode(text, tile.isTextVisible()); chartCanvas = new Canvas(size * 0.9, tile.isTextVisible() ? size * 0.72 : size * 0.795); chartCtx = chartCanvas.getGraphicsContext2D(); getPane().getChildren().addAll(titleText, chartCanvas, text); } }
public class class_name { public void marshall(StartInstanceRequest startInstanceRequest, ProtocolMarshaller protocolMarshaller) { if (startInstanceRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(startInstanceRequest.getInstanceId(), INSTANCEID_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(StartInstanceRequest startInstanceRequest, ProtocolMarshaller protocolMarshaller) { if (startInstanceRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(startInstanceRequest.getInstanceId(), INSTANCEID_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static Expression getFieldExpression(DelegateTask task, String fieldName) { if (task.getCurrentActivitiListener() != null) { List<FieldExtension> fieldExtensions = task.getCurrentActivitiListener().getFieldExtensions(); if (fieldExtensions != null && fieldExtensions.size() > 0) { for (FieldExtension fieldExtension : fieldExtensions) { if (fieldName.equals(fieldExtension.getFieldName())) { return createExpressionForField(fieldExtension); } } } } return null; } }
public class class_name { public static Expression getFieldExpression(DelegateTask task, String fieldName) { if (task.getCurrentActivitiListener() != null) { List<FieldExtension> fieldExtensions = task.getCurrentActivitiListener().getFieldExtensions(); if (fieldExtensions != null && fieldExtensions.size() > 0) { for (FieldExtension fieldExtension : fieldExtensions) { if (fieldName.equals(fieldExtension.getFieldName())) { return createExpressionForField(fieldExtension); // depends on control dependency: [if], data = [none] } } } } return null; } }
public class class_name { public void onMetadataFileForceDelete( @Observes final FileDeletionEvent event ) { EventMetadata eventMetadata = event.getEventMetadata(); if ( !Boolean.TRUE.equals( eventMetadata.get( CHECK_CACHE_ONLY ) ) ) { return; } logger.trace( "Got file-delete event: {}", event ); Transfer transfer = event.getTransfer(); String path = transfer.getPath(); if ( !path.endsWith( METADATA_NAME ) ) { logger.trace( "Not {} , path: {}", METADATA_NAME, path ); return; } Location loc = transfer.getLocation(); if ( !( loc instanceof KeyedLocation ) ) { logger.trace( "Ignore FileDeletionEvent, not a KeyedLocation, location: {}", loc ); return; } KeyedLocation keyedLocation = (KeyedLocation) loc; StoreKey storeKey = keyedLocation.getKey(); try { ArtifactStore store = storeManager.getArtifactStore( storeKey ); metadataGenerator.clearAllMerged( store, path ); } catch ( IndyDataException e ) { logger.error( "Handle FileDeletionEvent failed", e ); } } }
public class class_name { public void onMetadataFileForceDelete( @Observes final FileDeletionEvent event ) { EventMetadata eventMetadata = event.getEventMetadata(); if ( !Boolean.TRUE.equals( eventMetadata.get( CHECK_CACHE_ONLY ) ) ) { return; // depends on control dependency: [if], data = [none] } logger.trace( "Got file-delete event: {}", event ); Transfer transfer = event.getTransfer(); String path = transfer.getPath(); if ( !path.endsWith( METADATA_NAME ) ) { logger.trace( "Not {} , path: {}", METADATA_NAME, path ); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } Location loc = transfer.getLocation(); if ( !( loc instanceof KeyedLocation ) ) { logger.trace( "Ignore FileDeletionEvent, not a KeyedLocation, location: {}", loc ); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } KeyedLocation keyedLocation = (KeyedLocation) loc; StoreKey storeKey = keyedLocation.getKey(); try { ArtifactStore store = storeManager.getArtifactStore( storeKey ); metadataGenerator.clearAllMerged( store, path ); // depends on control dependency: [try], data = [none] } catch ( IndyDataException e ) { logger.error( "Handle FileDeletionEvent failed", e ); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public RemoteCallReturn invokeSoapCall(SoapCall<BindingProvider> soapCall) { BindingProvider webService = soapCall.getSoapClient(); RemoteCallReturn.Builder builder = new RemoteCallReturn.Builder(); synchronized (webService) { Object result = null; try { result = invoke(soapCall); } catch (InvocationTargetException e) { builder.withException(e.getTargetException()); } catch (Exception e) { builder.withException(e); } finally { JaxWsSoapContextHandler contextHandler = getContextHandlerFromClient(webService); String url = getEndpointAddress(webService); builder.withRequestInfo(contextHandler.getLastRequestInfoBuilder().withUrl(url).build()); builder.withResponseInfo(contextHandler.getLastResponseInfoBuilder().build()); } return builder.withReturnValue(result).build(); } } }
public class class_name { @Override public RemoteCallReturn invokeSoapCall(SoapCall<BindingProvider> soapCall) { BindingProvider webService = soapCall.getSoapClient(); RemoteCallReturn.Builder builder = new RemoteCallReturn.Builder(); synchronized (webService) { Object result = null; try { result = invoke(soapCall); // depends on control dependency: [try], data = [none] } catch (InvocationTargetException e) { builder.withException(e.getTargetException()); } catch (Exception e) { // depends on control dependency: [catch], data = [none] builder.withException(e); } finally { // depends on control dependency: [catch], data = [none] JaxWsSoapContextHandler contextHandler = getContextHandlerFromClient(webService); String url = getEndpointAddress(webService); builder.withRequestInfo(contextHandler.getLastRequestInfoBuilder().withUrl(url).build()); builder.withResponseInfo(contextHandler.getLastResponseInfoBuilder().build()); } return builder.withReturnValue(result).build(); } } }
public class class_name { @Override public Object get(int key, Object defaultValue) { if (key <= 0) return defaultValue; if (key > list.size()) return defaultValue; try { Object rtn = list.get(key - 1); if (rtn == null) { if (NullSupportHelper.full()) { return null; } return defaultValue; } return rtn; } catch (Exception e) { return defaultValue; } } }
public class class_name { @Override public Object get(int key, Object defaultValue) { if (key <= 0) return defaultValue; if (key > list.size()) return defaultValue; try { Object rtn = list.get(key - 1); if (rtn == null) { if (NullSupportHelper.full()) { return null; // depends on control dependency: [if], data = [none] } return defaultValue; // depends on control dependency: [if], data = [none] } return rtn; // depends on control dependency: [try], data = [none] } catch (Exception e) { return defaultValue; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Format getFormatByArgumentName(String argumentName) { if (cachedFormatters == null) { return null; } int argNumber = MessagePattern.validateArgumentName(argumentName); if (argNumber < MessagePattern.ARG_NAME_NOT_NUMBER) { return null; } for (int partIndex = 0; (partIndex = nextTopLevelArgStart(partIndex)) >= 0;) { if (argNameMatches(partIndex + 1, argumentName, argNumber)) { return cachedFormatters.get(partIndex); } } return null; } }
public class class_name { public Format getFormatByArgumentName(String argumentName) { if (cachedFormatters == null) { return null; // depends on control dependency: [if], data = [none] } int argNumber = MessagePattern.validateArgumentName(argumentName); if (argNumber < MessagePattern.ARG_NAME_NOT_NUMBER) { return null; // depends on control dependency: [if], data = [none] } for (int partIndex = 0; (partIndex = nextTopLevelArgStart(partIndex)) >= 0;) { if (argNameMatches(partIndex + 1, argumentName, argNumber)) { return cachedFormatters.get(partIndex); // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { @Override public String readLine(String prompt) { try { return console.read(prompt); } catch (IOException e) { e.printStackTrace(); return null; } } }
public class class_name { @Override public String readLine(String prompt) { try { return console.read(prompt); // depends on control dependency: [try], data = [none] } catch (IOException e) { e.printStackTrace(); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static void planarToBuffered_F32(Planar<GrayF32> src, DataBuffer buffer, BufferedImage dst) { if(BoofConcurrency.USE_CONCURRENT ) { try { if (buffer.getDataType() == DataBuffer.TYPE_BYTE && isKnownByteFormat(dst)) { ImplConvertRaster_MT.planarToBuffered_F32(src, (DataBufferByte) buffer, dst.getRaster()); } else if (buffer.getDataType() == DataBuffer.TYPE_INT) { ImplConvertRaster_MT.planarToBuffered_F32(src, (DataBufferInt) buffer, dst.getRaster()); } else { ImplConvertRaster_MT.planarToBuffered_F32(src, dst); } // hack so that it knows the buffer has been modified dst.setRGB(0, 0, dst.getRGB(0, 0)); } catch (java.security.AccessControlException e) { ImplConvertRaster_MT.planarToBuffered_F32(src, dst); } } else { try { if (buffer.getDataType() == DataBuffer.TYPE_BYTE && isKnownByteFormat(dst)) { ImplConvertRaster.planarToBuffered_F32(src, (DataBufferByte) buffer, dst.getRaster()); } else if (buffer.getDataType() == DataBuffer.TYPE_INT) { ImplConvertRaster.planarToBuffered_F32(src, (DataBufferInt) buffer, dst.getRaster()); } else { ImplConvertRaster.planarToBuffered_F32(src, dst); } // hack so that it knows the buffer has been modified dst.setRGB(0, 0, dst.getRGB(0, 0)); } catch (java.security.AccessControlException e) { ImplConvertRaster.planarToBuffered_F32(src, dst); } } } }
public class class_name { public static void planarToBuffered_F32(Planar<GrayF32> src, DataBuffer buffer, BufferedImage dst) { if(BoofConcurrency.USE_CONCURRENT ) { try { if (buffer.getDataType() == DataBuffer.TYPE_BYTE && isKnownByteFormat(dst)) { ImplConvertRaster_MT.planarToBuffered_F32(src, (DataBufferByte) buffer, dst.getRaster()); // depends on control dependency: [if], data = [none] } else if (buffer.getDataType() == DataBuffer.TYPE_INT) { ImplConvertRaster_MT.planarToBuffered_F32(src, (DataBufferInt) buffer, dst.getRaster()); // depends on control dependency: [if], data = [none] } else { ImplConvertRaster_MT.planarToBuffered_F32(src, dst); // depends on control dependency: [if], data = [none] } // hack so that it knows the buffer has been modified dst.setRGB(0, 0, dst.getRGB(0, 0)); // depends on control dependency: [try], data = [none] } catch (java.security.AccessControlException e) { ImplConvertRaster_MT.planarToBuffered_F32(src, dst); } // depends on control dependency: [catch], data = [none] } else { try { if (buffer.getDataType() == DataBuffer.TYPE_BYTE && isKnownByteFormat(dst)) { ImplConvertRaster.planarToBuffered_F32(src, (DataBufferByte) buffer, dst.getRaster()); // depends on control dependency: [if], data = [none] } else if (buffer.getDataType() == DataBuffer.TYPE_INT) { ImplConvertRaster.planarToBuffered_F32(src, (DataBufferInt) buffer, dst.getRaster()); // depends on control dependency: [if], data = [none] } else { ImplConvertRaster.planarToBuffered_F32(src, dst); // depends on control dependency: [if], data = [none] } // hack so that it knows the buffer has been modified dst.setRGB(0, 0, dst.getRGB(0, 0)); // depends on control dependency: [try], data = [none] } catch (java.security.AccessControlException e) { ImplConvertRaster.planarToBuffered_F32(src, dst); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { private void processResourceAssignments(Task task, List<MapRow> assignments) { for (MapRow row : assignments) { processResourceAssignment(task, row); } } }
public class class_name { private void processResourceAssignments(Task task, List<MapRow> assignments) { for (MapRow row : assignments) { processResourceAssignment(task, row); // depends on control dependency: [for], data = [row] } } }
public class class_name { public void addCommandRegistryListener(CommandRegistryListener listener) { if (logger.isDebugEnabled()) { logger.debug("Adding command registry listener " + listener); } commandRegistryListeners.add(listener); } }
public class class_name { public void addCommandRegistryListener(CommandRegistryListener listener) { if (logger.isDebugEnabled()) { logger.debug("Adding command registry listener " + listener); // depends on control dependency: [if], data = [none] } commandRegistryListeners.add(listener); } }
public class class_name { public Flow[] getFlows() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "getFlows"); } Flow[] clonedFlows = flows.clone(); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "getFlows", clonedFlows); } return clonedFlows; } }
public class class_name { public Flow[] getFlows() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "getFlows"); // depends on control dependency: [if], data = [none] } Flow[] clonedFlows = flows.clone(); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "getFlows", clonedFlows); // depends on control dependency: [if], data = [none] } return clonedFlows; } }
public class class_name { public static final KeyPressHandler getRegExKeyPressHandler(final String pregEx) { if (StringUtils.isEmpty(pregEx)) { return null; } RegExKeyPressHandler result = HandlerFactory.REG_EX_KEY_PRESS_HANDLER_MAP.get(pregEx); if (result == null) { result = new RegExKeyPressHandler(pregEx); } return result; } }
public class class_name { public static final KeyPressHandler getRegExKeyPressHandler(final String pregEx) { if (StringUtils.isEmpty(pregEx)) { return null; // depends on control dependency: [if], data = [none] } RegExKeyPressHandler result = HandlerFactory.REG_EX_KEY_PRESS_HANDLER_MAP.get(pregEx); if (result == null) { result = new RegExKeyPressHandler(pregEx); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public static int medoid(boolean parallel, Collection<Integer> indecies, double tol, List<? extends Vec> X, DistanceMetric dm, List<Double> accel) { final int N = indecies.size(); if(tol <= 0 || N < SystemInfo.LogicalCores)//Really just not enough points, lets simplify return PAM.medoid(parallel, indecies, X, dm, accel); final double log2d = Math.log(1)-Math.log(tol); /** * Online estimate of the standard deviation that will be used */ final OnLineStatistics distanceStats; /** * This array contains the current sum of all distance computations done * for each index. Corresponds to mu in the paper. */ AtomicDoubleArray totalDistSum = new AtomicDoubleArray(N); /** * This array contains the current number of distance computations that * have been done for each feature index. Corresponds to T_i in the * paper. */ AtomicIntegerArray totalDistCount = new AtomicIntegerArray(N); final int[] indx_map = indecies.stream().mapToInt(i->i).toArray(); final boolean symetric = dm.isSymmetric(); final double[] lower_bound_est = new double[N]; final double[] upper_bound_est = new double[N]; ThreadLocal<Random> localRand = ThreadLocal.withInitial(RandomUtil::getRandom); //First pass, lets pull every "arm" (compute a dsitance) for each datumn at least once, so that we have estiamtes to work with. 
distanceStats = ParallelUtils.run(parallel, N, (start, end)-> { Random rand = localRand.get(); OnLineStatistics localStats = new OnLineStatistics(); for(int i = start; i < end; i++) { int j = rand.nextInt(N); while(j == i) j = rand.nextInt(N); double d_ij = dm.dist(indx_map[i], indx_map[j], X, accel); localStats.add(d_ij); totalDistSum.addAndGet(i, d_ij); totalDistCount.incrementAndGet(i); if(symetric) { totalDistSum.addAndGet(j, d_ij); totalDistCount.incrementAndGet(j); } } return localStats; }, (a,b)-> OnLineStatistics.add(a, b)); //Now lets prepare the lower and upper bound estimates ConcurrentSkipListSet<Integer> lowerQ = new ConcurrentSkipListSet<>((Integer o1, Integer o2) -> { int cmp = Double.compare(lower_bound_est[o1], lower_bound_est[o2]); if(cmp == 0)//same bounds, but sort by identity to avoid issues cmp = o1.compareTo(o2); return cmp; }); ConcurrentSkipListSet<Integer> upperQ = new ConcurrentSkipListSet<>((Integer o1, Integer o2) -> { int cmp = Double.compare(upper_bound_est[o1], upper_bound_est[o2]); if(cmp == 0)//same bounds, but sort by identity to avoid issues cmp = o1.compareTo(o2); return cmp; }); ParallelUtils.run(parallel, N, (start, end)-> { double v = distanceStats.getVarance(); for(int i = start; i < end; i++) { int T_i = totalDistCount.get(i); double c_i = Math.sqrt(2*v*log2d/T_i); lower_bound_est[i] = totalDistSum.get(i)/T_i - c_i; upper_bound_est[i] = totalDistSum.get(i)/T_i + c_i; lowerQ.add(i); upperQ.add(i); } }); //Now lets start sampling! //how many points should we pick and sample? 
Not really discussed in paper- but a good idea for efficency (dont want to pay that Q cost as much as possible) /** * to-pull is how many arms we will select per iteration */ int num_to_pull; /** * to sample is how many random pairs we will pick for each pulled arm */ int samples; if(parallel) { num_to_pull = Math.max(SystemInfo.LogicalCores, 32); samples = Math.min(32, N-1); } else { num_to_pull = Math.min(32, N); samples = Math.min(32, N-1); } /** * The levers we will pull this iteration, and then add back in */ IntList to_pull = new IntList(); /** * the levers we must add back in but not update b/c they hit max evaluations and the confidence bound is tight */ IntList toAddBack = new IntList(); boolean[] isExact = new boolean[N]; Arrays.fill(isExact, false); int numExact = 0; while(numExact < N)//loop should break out before this ever happens { to_pull.clear(); toAddBack.clear(); //CONVERGENCE CEHCK if(upper_bound_est[upperQ.first()] < lower_bound_est[lowerQ.first()]) { //WE are done! return indx_map[upperQ.first()]; } while(to_pull.size() < num_to_pull) { if(lowerQ.isEmpty()) break;//we've basically evaluated everyone int i = lowerQ.pollFirst(); if(totalDistCount.get(i) >= N-1 && !isExact[i])//Lets just replace with exact value { double avg_d_i = ParallelUtils.run(parallel, N, (start, end)-> { double d = 0; for (int j = start; j < end; j++) if (i != j) d += dm.dist(indx_map[i], indx_map[j], X, accel); return d; }, (a, b)->a+b); avg_d_i /= N-1; upperQ.remove(i); lower_bound_est[i] = upper_bound_est[i] = avg_d_i; totalDistSum.set(i, avg_d_i); totalDistCount.set(i, N); isExact[i] = true; numExact++; // System.out.println("Num Exact: " + numExact); //OK, exavt value for datumn I is set. 
toAddBack.add(i); } if(!isExact[i]) to_pull.add(i); } //OK, lets now pull a bunch of levers / measure distances OnLineStatistics changeInStats = ParallelUtils.run(parallel, to_pull.size(), (start, end)-> { Random rand = localRand.get(); OnLineStatistics localStats = new OnLineStatistics(); for(int i_count = start; i_count < end; i_count++) { int i = to_pull.get(i_count); for(int j_count = 0; j_count < samples; j_count++) { int j = rand.nextInt(N); while(j == i) j = rand.nextInt(N); double d_ij = dm.dist(indx_map[i], indx_map[j], X, accel); localStats.add(d_ij); totalDistSum.addAndGet(i, d_ij); totalDistCount.incrementAndGet(i); if(symetric && !isExact[j]) { totalDistSum.addAndGet(j, d_ij); totalDistCount.incrementAndGet(j); } } } return localStats; }, (a,b) -> OnLineStatistics.add(a, b)); if(!to_pull.isEmpty())//might be empty if everyone went over the threshold distanceStats.add(changeInStats); //update bounds and re-insert double v = distanceStats.getVarance(); //we are only updating the bounds on the levers we pulled //that may mean some old bounds are stale //these values are exact lowerQ.addAll(toAddBack); upperQ.addAll(toAddBack); upperQ.removeAll(to_pull); for(int i : to_pull) { int T_i = totalDistCount.get(i); double c_i = Math.sqrt(2*v*log2d/T_i); lower_bound_est[i] = totalDistSum.get(i)/T_i - c_i; upper_bound_est[i] = totalDistSum.get(i)/T_i + c_i; lowerQ.add(i); upperQ.add(i); } } //We can reach this point on small N or low D datasets. Iterate and return the correct value int bestIndex = 0; for(int i = 1; i < N; i++) if(lower_bound_est[i] < lower_bound_est[bestIndex]) bestIndex = i; return bestIndex; } }
public class class_name { public static int medoid(boolean parallel, Collection<Integer> indecies, double tol, List<? extends Vec> X, DistanceMetric dm, List<Double> accel) { final int N = indecies.size(); if(tol <= 0 || N < SystemInfo.LogicalCores)//Really just not enough points, lets simplify return PAM.medoid(parallel, indecies, X, dm, accel); final double log2d = Math.log(1)-Math.log(tol); /** * Online estimate of the standard deviation that will be used */ final OnLineStatistics distanceStats; /** * This array contains the current sum of all distance computations done * for each index. Corresponds to mu in the paper. */ AtomicDoubleArray totalDistSum = new AtomicDoubleArray(N); /** * This array contains the current number of distance computations that * have been done for each feature index. Corresponds to T_i in the * paper. */ AtomicIntegerArray totalDistCount = new AtomicIntegerArray(N); final int[] indx_map = indecies.stream().mapToInt(i->i).toArray(); final boolean symetric = dm.isSymmetric(); final double[] lower_bound_est = new double[N]; final double[] upper_bound_est = new double[N]; ThreadLocal<Random> localRand = ThreadLocal.withInitial(RandomUtil::getRandom); //First pass, lets pull every "arm" (compute a dsitance) for each datumn at least once, so that we have estiamtes to work with. 
distanceStats = ParallelUtils.run(parallel, N, (start, end)-> { Random rand = localRand.get(); OnLineStatistics localStats = new OnLineStatistics(); for(int i = start; i < end; i++) { int j = rand.nextInt(N); while(j == i) j = rand.nextInt(N); double d_ij = dm.dist(indx_map[i], indx_map[j], X, accel); localStats.add(d_ij); // depends on control dependency: [for], data = [none] totalDistSum.addAndGet(i, d_ij); // depends on control dependency: [for], data = [i] totalDistCount.incrementAndGet(i); // depends on control dependency: [for], data = [i] if(symetric) { totalDistSum.addAndGet(j, d_ij); // depends on control dependency: [if], data = [none] totalDistCount.incrementAndGet(j); // depends on control dependency: [if], data = [none] } } return localStats; }, (a,b)-> OnLineStatistics.add(a, b)); //Now lets prepare the lower and upper bound estimates ConcurrentSkipListSet<Integer> lowerQ = new ConcurrentSkipListSet<>((Integer o1, Integer o2) -> { int cmp = Double.compare(lower_bound_est[o1], lower_bound_est[o2]); if(cmp == 0)//same bounds, but sort by identity to avoid issues cmp = o1.compareTo(o2); return cmp; }); ConcurrentSkipListSet<Integer> upperQ = new ConcurrentSkipListSet<>((Integer o1, Integer o2) -> { int cmp = Double.compare(upper_bound_est[o1], upper_bound_est[o2]); if(cmp == 0)//same bounds, but sort by identity to avoid issues cmp = o1.compareTo(o2); return cmp; }); ParallelUtils.run(parallel, N, (start, end)-> { double v = distanceStats.getVarance(); for(int i = start; i < end; i++) { int T_i = totalDistCount.get(i); double c_i = Math.sqrt(2*v*log2d/T_i); lower_bound_est[i] = totalDistSum.get(i)/T_i - c_i; upper_bound_est[i] = totalDistSum.get(i)/T_i + c_i; lowerQ.add(i); upperQ.add(i); } }); //Now lets start sampling! //how many points should we pick and sample? 
Not really discussed in paper- but a good idea for efficency (dont want to pay that Q cost as much as possible) /** * to-pull is how many arms we will select per iteration */ int num_to_pull; /** * to sample is how many random pairs we will pick for each pulled arm */ int samples; if(parallel) { num_to_pull = Math.max(SystemInfo.LogicalCores, 32); samples = Math.min(32, N-1); } else { num_to_pull = Math.min(32, N); samples = Math.min(32, N-1); } /** * The levers we will pull this iteration, and then add back in */ IntList to_pull = new IntList(); /** * the levers we must add back in but not update b/c they hit max evaluations and the confidence bound is tight */ IntList toAddBack = new IntList(); boolean[] isExact = new boolean[N]; Arrays.fill(isExact, false); int numExact = 0; while(numExact < N)//loop should break out before this ever happens { to_pull.clear(); toAddBack.clear(); //CONVERGENCE CEHCK if(upper_bound_est[upperQ.first()] < lower_bound_est[lowerQ.first()]) { //WE are done! return indx_map[upperQ.first()]; } while(to_pull.size() < num_to_pull) { if(lowerQ.isEmpty()) break;//we've basically evaluated everyone int i = lowerQ.pollFirst(); if(totalDistCount.get(i) >= N-1 && !isExact[i])//Lets just replace with exact value { double avg_d_i = ParallelUtils.run(parallel, N, (start, end)-> { double d = 0; for (int j = start; j < end; j++) if (i != j) d += dm.dist(indx_map[i], indx_map[j], X, accel); return d; }, (a, b)->a+b); avg_d_i /= N-1; upperQ.remove(i); lower_bound_est[i] = upper_bound_est[i] = avg_d_i; totalDistSum.set(i, avg_d_i); totalDistCount.set(i, N); isExact[i] = true; numExact++; // System.out.println("Num Exact: " + numExact); //OK, exavt value for datumn I is set. 
toAddBack.add(i); } if(!isExact[i]) to_pull.add(i); } //OK, lets now pull a bunch of levers / measure distances OnLineStatistics changeInStats = ParallelUtils.run(parallel, to_pull.size(), (start, end)-> { Random rand = localRand.get(); OnLineStatistics localStats = new OnLineStatistics(); for(int i_count = start; i_count < end; i_count++) { int i = to_pull.get(i_count); for(int j_count = 0; j_count < samples; j_count++) { int j = rand.nextInt(N); while(j == i) j = rand.nextInt(N); double d_ij = dm.dist(indx_map[i], indx_map[j], X, accel); localStats.add(d_ij); totalDistSum.addAndGet(i, d_ij); totalDistCount.incrementAndGet(i); if(symetric && !isExact[j]) { totalDistSum.addAndGet(j, d_ij); totalDistCount.incrementAndGet(j); } } } return localStats; }, (a,b) -> OnLineStatistics.add(a, b)); if(!to_pull.isEmpty())//might be empty if everyone went over the threshold distanceStats.add(changeInStats); //update bounds and re-insert double v = distanceStats.getVarance(); //we are only updating the bounds on the levers we pulled //that may mean some old bounds are stale //these values are exact lowerQ.addAll(toAddBack); upperQ.addAll(toAddBack); upperQ.removeAll(to_pull); for(int i : to_pull) { int T_i = totalDistCount.get(i); double c_i = Math.sqrt(2*v*log2d/T_i); lower_bound_est[i] = totalDistSum.get(i)/T_i - c_i; upper_bound_est[i] = totalDistSum.get(i)/T_i + c_i; lowerQ.add(i); upperQ.add(i); } } //We can reach this point on small N or low D datasets. Iterate and return the correct value int bestIndex = 0; for(int i = 1; i < N; i++) if(lower_bound_est[i] < lower_bound_est[bestIndex]) bestIndex = i; return bestIndex; } }
public class class_name { public void postConstruct() { Assert.isTrue(this.spacing == null || this.spacing.length == 2, GridLayer.class.getSimpleName() + ".spacing has the wrong number of elements. Expected 2 (x,y) but was: " + Arrays.toString(this.spacing)); Assert.isTrue(this.numberOfLines == null || this.numberOfLines.length == 2, GridLayer.class.getSimpleName() + ".numberOfLines has the wrong number of elements. Expected 2 (x,y) but was: " + Arrays.toString(this.numberOfLines)); Assert.isTrue(this.pointsInLine > 2, "There must be at least 2 points in a line. There were: " + this.pointsInLine); Assert.isTrue(this.indent >= 0, "The indent is not permitted to be negative: " + this.indent); Assert.isTrue(this.labelColor != null, "labelColor should not be null"); Assert.isTrue(this.haloColor != null, "haloColor should not be null"); Assert.isTrue(this.font != null, "font should not be null"); try { if (this.labelProjection != null) { if (this.longitudeFirst != null) { this.labelCRS = CRS.decode(this.labelProjection, this.longitudeFirst); } else { this.labelCRS = CRS.decode(this.labelProjection); } } } catch (FactoryException e) { throw new IllegalArgumentException("The projection code: " + this.labelProjection + " is not valid. Error message when parsing code: " + e.getMessage()); } if (this.labelFormat != null || this.valueFormat != null || this.unitFormat != null) { GridLabelFormat format = GridLabelFormat.fromConfig(this); if (format == null) { throw new IllegalArgumentException("`labelFormat` or `valueFormat` must be set"); } try { format.format(2.0, "m"); } catch (IllegalFormatException e) { throw new IllegalArgumentException("Invalid label format"); } this.gridLabelFormat = format; } } }
public class class_name { public void postConstruct() { Assert.isTrue(this.spacing == null || this.spacing.length == 2, GridLayer.class.getSimpleName() + ".spacing has the wrong number of elements. Expected 2 (x,y) but was: " + Arrays.toString(this.spacing)); Assert.isTrue(this.numberOfLines == null || this.numberOfLines.length == 2, GridLayer.class.getSimpleName() + ".numberOfLines has the wrong number of elements. Expected 2 (x,y) but was: " + Arrays.toString(this.numberOfLines)); Assert.isTrue(this.pointsInLine > 2, "There must be at least 2 points in a line. There were: " + this.pointsInLine); Assert.isTrue(this.indent >= 0, "The indent is not permitted to be negative: " + this.indent); Assert.isTrue(this.labelColor != null, "labelColor should not be null"); Assert.isTrue(this.haloColor != null, "haloColor should not be null"); Assert.isTrue(this.font != null, "font should not be null"); try { if (this.labelProjection != null) { if (this.longitudeFirst != null) { this.labelCRS = CRS.decode(this.labelProjection, this.longitudeFirst); // depends on control dependency: [if], data = [none] } else { this.labelCRS = CRS.decode(this.labelProjection); // depends on control dependency: [if], data = [none] } } } catch (FactoryException e) { throw new IllegalArgumentException("The projection code: " + this.labelProjection + " is not valid. 
Error message when parsing code: " + e.getMessage()); } // depends on control dependency: [catch], data = [none] if (this.labelFormat != null || this.valueFormat != null || this.unitFormat != null) { GridLabelFormat format = GridLabelFormat.fromConfig(this); if (format == null) { throw new IllegalArgumentException("`labelFormat` or `valueFormat` must be set"); } try { format.format(2.0, "m"); // depends on control dependency: [try], data = [none] } catch (IllegalFormatException e) { throw new IllegalArgumentException("Invalid label format"); } // depends on control dependency: [catch], data = [none] this.gridLabelFormat = format; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static DbPipe toDbPipe(String pipeName, final String[] array) throws DevFailed { DbPipe dbPipe = new DbPipe(pipeName); try { int index = 3; final int nbProperties = Integer.parseInt(array[index++]); for (int i=0 ; i<nbProperties ; i++) { String propertyName = array[index++]; final int nbValues = Integer.parseInt(array[index++]); String[] arrayValues = new String[nbValues]; for (int v=0 ; v<nbValues ; v++) { arrayValues[v] = array[index++]; } dbPipe.add(new DbDatum(propertyName, arrayValues)); } } catch (Exception e) { Except.throw_exception("TangoApi_SyntaxError", "Cannot convert data to DbPipe: "+e); } return dbPipe; } }
public class class_name { public static DbPipe toDbPipe(String pipeName, final String[] array) throws DevFailed { DbPipe dbPipe = new DbPipe(pipeName); try { int index = 3; final int nbProperties = Integer.parseInt(array[index++]); for (int i=0 ; i<nbProperties ; i++) { String propertyName = array[index++]; final int nbValues = Integer.parseInt(array[index++]); String[] arrayValues = new String[nbValues]; for (int v=0 ; v<nbValues ; v++) { arrayValues[v] = array[index++]; // depends on control dependency: [for], data = [v] } dbPipe.add(new DbDatum(propertyName, arrayValues)); // depends on control dependency: [for], data = [none] } } catch (Exception e) { Except.throw_exception("TangoApi_SyntaxError", "Cannot convert data to DbPipe: "+e); } return dbPipe; } }
public class class_name { public static int intValue(String option) { String s = value(option); if (s != null) { try { int val = Integer.parseInt(s); if (val > 0) return (val); } catch (NumberFormatException e) { } } return (-1); } }
public class class_name { public static int intValue(String option) { String s = value(option); if (s != null) { try { int val = Integer.parseInt(s); if (val > 0) return (val); } catch (NumberFormatException e) { } // depends on control dependency: [catch], data = [none] } return (-1); } }
public class class_name { @Override public AuditEventOutputStream write(AuditEvent event) { if (LifeCycleContext.getInstance().getStatus().equals(RunStatus.RUNNING)) { try { ValidationManager.validateEvent(event); if (configContext.getFilters().isEmpty() || filterAccepts(event)) { outputStream.write(event); } } catch (ValidationException e) { } } return this; } }
public class class_name { @Override public AuditEventOutputStream write(AuditEvent event) { if (LifeCycleContext.getInstance().getStatus().equals(RunStatus.RUNNING)) { try { ValidationManager.validateEvent(event); // depends on control dependency: [try], data = [none] if (configContext.getFilters().isEmpty() || filterAccepts(event)) { outputStream.write(event); // depends on control dependency: [if], data = [none] } } catch (ValidationException e) { } // depends on control dependency: [catch], data = [none] } return this; } }
public class class_name { public void addListeners() { super.addListeners(); if (m_properties.size() > 0) { Record recPropertiesInput = this.getMainRecord(); recPropertiesInput.setKeyArea(PropertiesInput.KEY_KEY); Iterator<Object> iterator = m_properties.keySet().iterator(); while (iterator.hasNext()) { String strKey = (String)iterator.next(); try { recPropertiesInput.addNew(); recPropertiesInput.getField(PropertiesInput.KEY).setString(strKey); if (!recPropertiesInput.seek(null)) { recPropertiesInput.addNew(); recPropertiesInput.getField(PropertiesInput.KEY).setString(strKey); recPropertiesInput.add(); } } catch (DBException ex) { ex.printStackTrace(); } } //?this.getMainRecord().getField(PropertiesInput.KEY).setEnabled(false); } } }
public class class_name { public void addListeners() { super.addListeners(); if (m_properties.size() > 0) { Record recPropertiesInput = this.getMainRecord(); recPropertiesInput.setKeyArea(PropertiesInput.KEY_KEY); // depends on control dependency: [if], data = [none] Iterator<Object> iterator = m_properties.keySet().iterator(); while (iterator.hasNext()) { String strKey = (String)iterator.next(); try { recPropertiesInput.addNew(); // depends on control dependency: [try], data = [none] recPropertiesInput.getField(PropertiesInput.KEY).setString(strKey); // depends on control dependency: [try], data = [none] if (!recPropertiesInput.seek(null)) { recPropertiesInput.addNew(); // depends on control dependency: [if], data = [none] recPropertiesInput.getField(PropertiesInput.KEY).setString(strKey); // depends on control dependency: [if], data = [none] recPropertiesInput.add(); // depends on control dependency: [if], data = [none] } } catch (DBException ex) { ex.printStackTrace(); } // depends on control dependency: [catch], data = [none] } //?this.getMainRecord().getField(PropertiesInput.KEY).setEnabled(false); } } }
public class class_name { public void marshall(GetLayerVersionRequest getLayerVersionRequest, ProtocolMarshaller protocolMarshaller) { if (getLayerVersionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getLayerVersionRequest.getLayerName(), LAYERNAME_BINDING); protocolMarshaller.marshall(getLayerVersionRequest.getVersionNumber(), VERSIONNUMBER_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetLayerVersionRequest getLayerVersionRequest, ProtocolMarshaller protocolMarshaller) { if (getLayerVersionRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getLayerVersionRequest.getLayerName(), LAYERNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getLayerVersionRequest.getVersionNumber(), VERSIONNUMBER_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { protected SparseArray<ReleaseItem> readChangeLog(XmlPullParser xml, boolean full) { SparseArray<ReleaseItem> result = new SparseArray<ReleaseItem>(); try { int eventType = xml.getEventType(); while (eventType != XmlPullParser.END_DOCUMENT) { if (eventType == XmlPullParser.START_TAG && xml.getName().equals(ReleaseTag.NAME)) { if (parseReleaseTag(xml, full, result)) { // Stop reading more elements if this entry is not newer than the last // version. break; } } eventType = xml.next(); } } catch (XmlPullParserException e) { Log.e(LOG_TAG, e.getMessage(), e); } catch (IOException e) { Log.e(LOG_TAG, e.getMessage(), e); } return result; } }
public class class_name { protected SparseArray<ReleaseItem> readChangeLog(XmlPullParser xml, boolean full) { SparseArray<ReleaseItem> result = new SparseArray<ReleaseItem>(); try { int eventType = xml.getEventType(); while (eventType != XmlPullParser.END_DOCUMENT) { if (eventType == XmlPullParser.START_TAG && xml.getName().equals(ReleaseTag.NAME)) { if (parseReleaseTag(xml, full, result)) { // Stop reading more elements if this entry is not newer than the last // version. break; } } eventType = xml.next(); // depends on control dependency: [while], data = [none] } } catch (XmlPullParserException e) { Log.e(LOG_TAG, e.getMessage(), e); } catch (IOException e) { // depends on control dependency: [catch], data = [none] Log.e(LOG_TAG, e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return result; } }
public class class_name { void setAsas(double[] asas1, double[] asas2, int nSpherePoints, int nThreads, int cofactorSizeToUse) { Atom[] atoms = getAtomsForAsa(cofactorSizeToUse); AsaCalculator asaCalc = new AsaCalculator(atoms, AsaCalculator.DEFAULT_PROBE_SIZE, nSpherePoints, nThreads); double[] complexAsas = asaCalc.calculateAsas(); if (complexAsas.length!=asas1.length+asas2.length) throw new IllegalArgumentException("The size of ASAs of complex doesn't match that of ASAs 1 + ASAs 2"); groupAsas1 = new TreeMap<>(); groupAsas2 = new TreeMap<>(); this.totalArea = 0; for (int i=0;i<asas1.length;i++) { Group g = atoms[i].getGroup(); if (!g.getType().equals(GroupType.HETATM) || isInChain(g)) { // interface area should be only for protein/nucleotide but not hetatoms that are not part of the chain this.totalArea += (asas1[i] - complexAsas[i]); } if (!groupAsas1.containsKey(g.getResidueNumber())) { GroupAsa groupAsa = new GroupAsa(g); groupAsa.addAtomAsaU(asas1[i]); groupAsa.addAtomAsaC(complexAsas[i]); groupAsas1.put(g.getResidueNumber(), groupAsa); } else { GroupAsa groupAsa = groupAsas1.get(g.getResidueNumber()); groupAsa.addAtomAsaU(asas1[i]); groupAsa.addAtomAsaC(complexAsas[i]); } } for (int i=0;i<asas2.length;i++) { Group g = atoms[i+asas1.length].getGroup(); if (!g.getType().equals(GroupType.HETATM) || isInChain(g)) { // interface area should be only for protein/nucleotide but not hetatoms that are not part of the chain this.totalArea += (asas2[i] - complexAsas[i+asas1.length]); } if (!groupAsas2.containsKey(g.getResidueNumber())) { GroupAsa groupAsa = new GroupAsa(g); groupAsa.addAtomAsaU(asas2[i]); groupAsa.addAtomAsaC(complexAsas[i+asas1.length]); groupAsas2.put(g.getResidueNumber(), groupAsa); } else { GroupAsa groupAsa = groupAsas2.get(g.getResidueNumber()); groupAsa.addAtomAsaU(asas2[i]); groupAsa.addAtomAsaC(complexAsas[i+asas1.length]); } } // our interface area definition: average of bsa of both molecules this.totalArea = this.totalArea/2.0; } }
public class class_name { void setAsas(double[] asas1, double[] asas2, int nSpherePoints, int nThreads, int cofactorSizeToUse) { Atom[] atoms = getAtomsForAsa(cofactorSizeToUse); AsaCalculator asaCalc = new AsaCalculator(atoms, AsaCalculator.DEFAULT_PROBE_SIZE, nSpherePoints, nThreads); double[] complexAsas = asaCalc.calculateAsas(); if (complexAsas.length!=asas1.length+asas2.length) throw new IllegalArgumentException("The size of ASAs of complex doesn't match that of ASAs 1 + ASAs 2"); groupAsas1 = new TreeMap<>(); groupAsas2 = new TreeMap<>(); this.totalArea = 0; for (int i=0;i<asas1.length;i++) { Group g = atoms[i].getGroup(); if (!g.getType().equals(GroupType.HETATM) || isInChain(g)) { // interface area should be only for protein/nucleotide but not hetatoms that are not part of the chain this.totalArea += (asas1[i] - complexAsas[i]); // depends on control dependency: [if], data = [none] } if (!groupAsas1.containsKey(g.getResidueNumber())) { GroupAsa groupAsa = new GroupAsa(g); groupAsa.addAtomAsaU(asas1[i]); // depends on control dependency: [if], data = [none] groupAsa.addAtomAsaC(complexAsas[i]); // depends on control dependency: [if], data = [none] groupAsas1.put(g.getResidueNumber(), groupAsa); // depends on control dependency: [if], data = [none] } else { GroupAsa groupAsa = groupAsas1.get(g.getResidueNumber()); groupAsa.addAtomAsaU(asas1[i]); // depends on control dependency: [if], data = [none] groupAsa.addAtomAsaC(complexAsas[i]); // depends on control dependency: [if], data = [none] } } for (int i=0;i<asas2.length;i++) { Group g = atoms[i+asas1.length].getGroup(); if (!g.getType().equals(GroupType.HETATM) || isInChain(g)) { // interface area should be only for protein/nucleotide but not hetatoms that are not part of the chain this.totalArea += (asas2[i] - complexAsas[i+asas1.length]); // depends on control dependency: [if], data = [none] } if (!groupAsas2.containsKey(g.getResidueNumber())) { GroupAsa groupAsa = new GroupAsa(g); 
groupAsa.addAtomAsaU(asas2[i]); // depends on control dependency: [if], data = [none] groupAsa.addAtomAsaC(complexAsas[i+asas1.length]); // depends on control dependency: [if], data = [none] groupAsas2.put(g.getResidueNumber(), groupAsa); // depends on control dependency: [if], data = [none] } else { GroupAsa groupAsa = groupAsas2.get(g.getResidueNumber()); groupAsa.addAtomAsaU(asas2[i]); // depends on control dependency: [if], data = [none] groupAsa.addAtomAsaC(complexAsas[i+asas1.length]); // depends on control dependency: [if], data = [none] } } // our interface area definition: average of bsa of both molecules this.totalArea = this.totalArea/2.0; } }
public class class_name { public void updateSecurityMetadataWithRunAs(SecurityMetadata securityMetadataFromDD, IServletConfig servletConfig) { String runAs = servletConfig.getRunAsRole(); if (runAs != null) { String servletName = servletConfig.getServletName(); //only add if there is no run-as entry in web.xml Map<String, String> servletNameToRunAsRole = securityMetadataFromDD.getRunAsMap(); if (servletNameToRunAsRole.get(servletName) == null) { servletNameToRunAsRole.put(servletName, runAs); List<String> allRoles = securityMetadataFromDD.getRoles(); if (!allRoles.contains(runAs)) { allRoles.add(runAs); } if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Added runAs role: " + runAs); } } } } }
public class class_name { public void updateSecurityMetadataWithRunAs(SecurityMetadata securityMetadataFromDD, IServletConfig servletConfig) { String runAs = servletConfig.getRunAsRole(); if (runAs != null) { String servletName = servletConfig.getServletName(); //only add if there is no run-as entry in web.xml Map<String, String> servletNameToRunAsRole = securityMetadataFromDD.getRunAsMap(); if (servletNameToRunAsRole.get(servletName) == null) { servletNameToRunAsRole.put(servletName, runAs); // depends on control dependency: [if], data = [none] List<String> allRoles = securityMetadataFromDD.getRoles(); if (!allRoles.contains(runAs)) { allRoles.add(runAs); // depends on control dependency: [if], data = [none] } if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Added runAs role: " + runAs); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public void setPersons(java.util.Collection<PersonDetection> persons) { if (persons == null) { this.persons = null; return; } this.persons = new java.util.ArrayList<PersonDetection>(persons); } }
public class class_name { public void setPersons(java.util.Collection<PersonDetection> persons) { if (persons == null) { this.persons = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.persons = new java.util.ArrayList<PersonDetection>(persons); } }
public class class_name {
    /**
     * Populates the given RDBMetadata from JDBC metadata of the connection.
     * First it builds a list of "seed" relation ids: either the supplied
     * realTables, or — when realTables is null/empty — a per-vendor default
     * table listing (Oracle, DB2, SQL Server and a generic fallback are
     * handled separately). It then reads all columns for each seed relation,
     * creating DatabaseRelationDefinitions as the result set moves from one
     * table to the next, and finally attaches primary keys, unique
     * constraints and foreign keys to every extracted relation.
     *
     * NOTE(review): relies on getColumns() returning rows grouped by table —
     * the "currentRelation" switch below assumes that ordering; confirm
     * against the JDBC driver's documented ordering guarantees.
     *
     * @param metadata   metadata object to fill (also supplies the quoted-ID factory)
     * @param conn       open JDBC connection used for all metadata queries
     * @param realTables explicit tables to load, or null/empty for "all"
     * @throws SQLException propagated from the JDBC metadata calls
     */
    public static void loadMetadata(RDBMetadata metadata, Connection conn, Set<RelationID> realTables) throws SQLException {
        if (printouts)
            System.out.println("GETTING METADATA WITH " + conn + " ON " + realTables);
        final DatabaseMetaData md = conn.getMetaData();
        String productName = md.getDatabaseProductName();
        List<RelationID> seedRelationIds;
        QuotedIDFactory idfac = metadata.getQuotedIDFactory();
        if (productName.contains("Oracle")) {
            String defaultSchema = getOracleDefaultOwner(conn);
            if (realTables == null || realTables.isEmpty())
                seedRelationIds = getTableList(conn, new OracleRelationListProvider(idfac, defaultSchema), idfac);
            else
                seedRelationIds = getTableList(defaultSchema, realTables, idfac);
        }
        else {
            if (realTables == null || realTables.isEmpty()) {
                if (productName.contains("DB2"))
                    // select CURRENT SCHEMA from SYSIBM.SYSDUMMY1
                    seedRelationIds = getTableListDefault(md, ImmutableSet.of("SYSTOOLS", "SYSCAT", "SYSIBM", "SYSIBMADM", "SYSSTAT"), idfac);
                else if (productName.contains("SQL Server")) // MS SQL Server
                    // SELECT SCHEMA_NAME() would give default schema name
                    // https://msdn.microsoft.com/en-us/library/ms175068.aspx
                    seedRelationIds = getTableListDefault(md, ImmutableSet.of("sys", "INFORMATION_SCHEMA"), idfac);
                else
                    // for other database engines, including H2, HSQL, PostgreSQL and MySQL
                    seedRelationIds = getTableListDefault(md, ImmutableSet.<String>of(), idfac);
            }
            else
                seedRelationIds = getTableList(null, realTables, idfac);
        }
        // Vendor-specific JDBC datatype corrections (e.g. Oracle DATE quirks).
        DatatypeNormalizer dt = DefaultTypeFixer;
        if (productName.contains("Oracle"))
            dt = OracleTypeFixer;
        else if (productName.contains("MySQL"))
            dt = MySQLTypeFixer;
        else if (productName.contains("Microsoft SQL Server"))
            dt = SQLServerTypeFixer;
        List<DatabaseRelationDefinition> extractedRelations = new LinkedList<>();
        String catalog = getCatalog(metadata, conn);
        for (RelationID seedId : seedRelationIds) {
            // the same seedId can be mapped to many tables (if the seedId has no schema)
            // we collect attributes from all of them
            DatabaseRelationDefinition currentRelation = null;
            // catalog is ignored for now (rs.getString("TABLE_CAT"))
            try (ResultSet rs = md.getColumns(catalog, seedId.getSchemaName(), seedId.getTableName(), null)) {
                while (rs.next()) {
                    String schema = rs.getString("TABLE_SCHEM");
                    // MySQL workaround
                    if (schema == null)
                        schema = rs.getString("TABLE_CAT");
                    RelationID relationId = RelationID.createRelationIdFromDatabaseRecord(idfac, schema, rs.getString("TABLE_NAME"));
                    QuotedID attributeId = QuotedID.createIdFromDatabaseRecord(idfac, rs.getString("COLUMN_NAME"));
                    if (printouts)
                        System.out.println(" " + relationId + "." + attributeId);
                    if (currentRelation == null || !currentRelation.getID().equals(relationId)) {
                        // switch to the next database relation
                        currentRelation = metadata.createDatabaseRelation(relationId);
                        extractedRelations.add(currentRelation);
                    }
                    // columnNoNulls, columnNullable, columnNullableUnknown
                    boolean isNullable = rs.getInt("NULLABLE") != DatabaseMetaData.columnNoNulls;
                    String typeName = rs.getString("TYPE_NAME");
                    int dataType = dt.getCorrectedDatatype(rs.getInt("DATA_TYPE"), typeName);
                    currentRelation.addAttribute(attributeId, dataType, typeName, isNullable);
                }
            }
        }
        // Second pass: constraints can only be resolved once all relations exist.
        for (DatabaseRelationDefinition relation : extractedRelations) {
            getPrimaryKey(md, relation, metadata.getQuotedIDFactory());
            getUniqueAttributes(md, relation, metadata.getQuotedIDFactory());
            getForeignKeys(md, relation, metadata);
            if (printouts) {
                System.out.println(relation + ";");
                for (UniqueConstraint uc : relation.getUniqueConstraints())
                    System.out.println(uc + ";");
                for (ForeignKeyConstraint fk : relation.getForeignKeys())
                    System.out.println(fk + ";");
                System.out.println("");
            }
        }
        if (printouts) {
            System.out.println("RESULTING METADATA:\n" + metadata);
            System.out.println("DBMetadataExtractor END OF REPORT\n=================================");
        }
    }
}
public class class_name {
    /**
     * Populates the given RDBMetadata from JDBC metadata: builds per-vendor
     * seed relation ids (or uses realTables when supplied), reads all columns
     * for each seed relation while creating DatabaseRelationDefinitions, and
     * finally attaches primary keys, unique constraints and foreign keys.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     *
     * @throws SQLException propagated from the JDBC metadata calls
     */
    public static void loadMetadata(RDBMetadata metadata, Connection conn, Set<RelationID> realTables) throws SQLException {
        if (printouts)
            System.out.println("GETTING METADATA WITH " + conn + " ON " + realTables);
        final DatabaseMetaData md = conn.getMetaData();
        String productName = md.getDatabaseProductName();
        List<RelationID> seedRelationIds;
        QuotedIDFactory idfac = metadata.getQuotedIDFactory();
        if (productName.contains("Oracle")) {
            String defaultSchema = getOracleDefaultOwner(conn);
            if (realTables == null || realTables.isEmpty())
                seedRelationIds = getTableList(conn, new OracleRelationListProvider(idfac, defaultSchema), idfac);
            else
                seedRelationIds = getTableList(defaultSchema, realTables, idfac);
        }
        else {
            if (realTables == null || realTables.isEmpty()) {
                if (productName.contains("DB2"))
                    // select CURRENT SCHEMA from SYSIBM.SYSDUMMY1
                    seedRelationIds = getTableListDefault(md, ImmutableSet.of("SYSTOOLS", "SYSCAT", "SYSIBM", "SYSIBMADM", "SYSSTAT"), idfac);
                else if (productName.contains("SQL Server")) // MS SQL Server
                    // SELECT SCHEMA_NAME() would give default schema name
                    // https://msdn.microsoft.com/en-us/library/ms175068.aspx
                    seedRelationIds = getTableListDefault(md, ImmutableSet.of("sys", "INFORMATION_SCHEMA"), idfac);
                else
                    // for other database engines, including H2, HSQL, PostgreSQL and MySQL
                    seedRelationIds = getTableListDefault(md, ImmutableSet.<String>of(), idfac);
            }
            else
                seedRelationIds = getTableList(null, realTables, idfac);
        }
        // Vendor-specific JDBC datatype corrections.
        DatatypeNormalizer dt = DefaultTypeFixer;
        if (productName.contains("Oracle"))
            dt = OracleTypeFixer;
        else if (productName.contains("MySQL"))
            dt = MySQLTypeFixer;
        else if (productName.contains("Microsoft SQL Server"))
            dt = SQLServerTypeFixer;
        List<DatabaseRelationDefinition> extractedRelations = new LinkedList<>();
        String catalog = getCatalog(metadata, conn);
        for (RelationID seedId : seedRelationIds) {
            // the same seedId can be mapped to many tables (if the seedId has no schema)
            // we collect attributes from all of them
            DatabaseRelationDefinition currentRelation = null;
            // catalog is ignored for now (rs.getString("TABLE_CAT"))
            try (ResultSet rs = md.getColumns(catalog, seedId.getSchemaName(), seedId.getTableName(), null)) {
                while (rs.next()) {
                    String schema = rs.getString("TABLE_SCHEM");
                    // MySQL workaround
                    if (schema == null)
                        schema = rs.getString("TABLE_CAT");
                    RelationID relationId = RelationID.createRelationIdFromDatabaseRecord(idfac, schema, rs.getString("TABLE_NAME"));
                    QuotedID attributeId = QuotedID.createIdFromDatabaseRecord(idfac, rs.getString("COLUMN_NAME"));
                    if (printouts)
                        System.out.println(" " + relationId + "." + attributeId);
                    if (currentRelation == null || !currentRelation.getID().equals(relationId)) {
                        // switch to the next database relation
                        currentRelation = metadata.createDatabaseRelation(relationId); // depends on control dependency: [if], data = [none]
                        extractedRelations.add(currentRelation); // depends on control dependency: [if], data = [(currentRelation]
                    }
                    // columnNoNulls, columnNullable, columnNullableUnknown
                    boolean isNullable = rs.getInt("NULLABLE") != DatabaseMetaData.columnNoNulls;
                    String typeName = rs.getString("TYPE_NAME");
                    int dataType = dt.getCorrectedDatatype(rs.getInt("DATA_TYPE"), typeName);
                    currentRelation.addAttribute(attributeId, dataType, typeName, isNullable); // depends on control dependency: [while], data = [none]
                }
            }
        }
        // Second pass: constraints need all relations to exist first.
        for (DatabaseRelationDefinition relation : extractedRelations) {
            getPrimaryKey(md, relation, metadata.getQuotedIDFactory());
            getUniqueAttributes(md, relation, metadata.getQuotedIDFactory());
            getForeignKeys(md, relation, metadata);
            if (printouts) {
                System.out.println(relation + ";");
                for (UniqueConstraint uc : relation.getUniqueConstraints())
                    System.out.println(uc + ";");
                for (ForeignKeyConstraint fk : relation.getForeignKeys())
                    System.out.println(fk + ";");
                System.out.println("");
            }
        }
        if (printouts) {
            System.out.println("RESULTING METADATA:\n" + metadata);
            System.out.println("DBMetadataExtractor END OF REPORT\n=================================");
        }
    }
}
public class class_name {
    /**
     * Builds a Postcard from the given Uri, letting a registered
     * PathReplaceService rewrite the Uri first when one is available.
     *
     * @throws HandlerException when the Uri is null or stringifies to empty
     */
    protected Postcard build(Uri uri) {
        if (null == uri || TextUtils.isEmpty(uri.toString())) {
            throw new HandlerException(Consts.TAG + "Parameter invalid!");
        }
        PathReplaceService pService = ARouter.getInstance().navigation(PathReplaceService.class);
        Uri effectiveUri = (null == pService) ? uri : pService.forUri(uri);
        return new Postcard(effectiveUri.getPath(), extractGroup(effectiveUri.getPath()), effectiveUri, null);
    }
}
public class class_name {
    /**
     * Builds a Postcard from the given Uri, after giving a registered
     * PathReplaceService the chance to rewrite the Uri.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     *
     * @throws HandlerException when the Uri is null or stringifies to empty
     */
    protected Postcard build(Uri uri) {
        if (null == uri || TextUtils.isEmpty(uri.toString())) {
            throw new HandlerException(Consts.TAG + "Parameter invalid!");
        } else {
            PathReplaceService pService = ARouter.getInstance().navigation(PathReplaceService.class);
            if (null != pService) {
                uri = pService.forUri(uri); // depends on control dependency: [if], data = [none]
            }
            return new Postcard(uri.getPath(), extractGroup(uri.getPath()), uri, null); // depends on control dependency: [if], data = [none]
        }
    }
}
public class class_name {
    /**
     * Removes and returns the listener on top of the stack. When listeners
     * remain, the wrapped listener is re-pointed at the new top element.
     */
    private Listener popListener() {
        Listener removed = this.listenerStack.pop();
        boolean stackStillPopulated = !this.listenerStack.isEmpty();
        if (stackStillPopulated) {
            setWrappedListener(this.listenerStack.peek());
        }
        return removed;
    }
}
public class class_name {
    /**
     * Pops the top listener off the stack and, if listeners remain, re-wires
     * the wrapped listener to the new top element.
     *
     * NOTE(review): the trailing "// depends on control dependency" comment
     * appears to be a machine-generated dependence annotation — kept verbatim.
     *
     * @return the listener that was removed from the stack
     */
    private Listener popListener() {
        Listener listener = this.listenerStack.pop();
        if (!this.listenerStack.isEmpty()) {
            setWrappedListener(this.listenerStack.peek()); // depends on control dependency: [if], data = [none]
        }
        return listener;
    }
}
public class class_name {
    /**
     * Builds the triangle-neighbor index for a geometry made of triangles.
     * Pass 1 assigns a unique integer id to each distinct triangle vertex
     * (via a Quadtree point lookup) and records, per vertex, which triangles
     * share it. Pass 2 derives each triangle's three neighbors: the neighbor
     * opposite a vertex is the other triangle sharing the edge formed by the
     * remaining two vertices.
     *
     * @param geometry a collection of triangular Polygons (4 coordinates
     *                 each, closing point included)
     * @return per-triangle Triple of neighboring triangle indices
     * @throws TopologyException if any component is not a polygon or not a
     *         triangle
     */
    public Triple[] generateTriangleNeighbors(Geometry geometry) throws TopologyException {
        inputTriangles = geometry;
        CoordinateSequenceDimensionFilter sequenceDimensionFilter = new CoordinateSequenceDimensionFilter();
        geometry.apply(sequenceDimensionFilter);
        // Record whether the input coordinates carry a Z component.
        hasZ = sequenceDimensionFilter.getDimension() == CoordinateSequenceDimensionFilter.XYZ;
        Quadtree ptQuad = new Quadtree();
        // In order to compute triangle neighbors we have to set a unique id to points.
        triangleVertex = new Triple[geometry.getNumGeometries()];
        // Final size of tri vertex is not known at the moment. Give just an hint
        triVertex = new ArrayList<EnvelopeWithIndex>(triangleVertex.length);
        // First Loop make an index of triangle vertex
        for(int idgeom = 0; idgeom < triangleVertex.length; idgeom++) {
            Geometry geomItem = geometry.getGeometryN(idgeom);
            if(geomItem instanceof Polygon) {
                Coordinate[] coords = geomItem.getCoordinates();
                if(coords.length != 4) {
                    throw new TopologyException("Voronoi method accept only triangles");
                }
                triangleVertex[idgeom] = new Triple(getOrAppendVertex(coords[0], ptQuad),
                        getOrAppendVertex(coords[1], ptQuad),
                        getOrAppendVertex(coords[2], ptQuad));
                // Register this triangle on each of its three vertices.
                for(int triVertexIndex : triangleVertex[idgeom].toArray()) {
                    triVertex.get(triVertexIndex).addSharingTriangle(idgeom);
                }
            } else {
                throw new TopologyException("Voronoi method accept only polygons");
            }
        }
        // Second loop make an index of triangle neighbors
        ptQuad = null;
        triangleNeighbors = new Triple[geometry.getNumGeometries()];
        for(int triId = 0; triId< triangleVertex.length; triId++) {
            Triple triangleIndex = triangleVertex[triId];
            // Neighbor i shares the edge opposite vertex i of this triangle.
            triangleNeighbors[triId] = new Triple(commonEdge(triId,triVertex.get(triangleIndex.getB()), triVertex.get(triangleIndex.getC())),
                    commonEdge(triId,triVertex.get(triangleIndex.getA()), triVertex.get(triangleIndex.getC())),
                    commonEdge(triId,triVertex.get(triangleIndex.getB()), triVertex.get(triangleIndex.getA())));
        }
        triVertex.clear();
        return triangleNeighbors;
    }
}
public class class_name {
    /**
     * Builds the triangle-neighbor index for a geometry of triangles: pass 1
     * assigns unique ids to vertices and records which triangles share each
     * vertex; pass 2 resolves each triangle's neighbor across each edge.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     *
     * @throws TopologyException if a component is not a triangular polygon
     */
    public Triple[] generateTriangleNeighbors(Geometry geometry) throws TopologyException {
        inputTriangles = geometry;
        CoordinateSequenceDimensionFilter sequenceDimensionFilter = new CoordinateSequenceDimensionFilter();
        geometry.apply(sequenceDimensionFilter);
        hasZ = sequenceDimensionFilter.getDimension() == CoordinateSequenceDimensionFilter.XYZ;
        Quadtree ptQuad = new Quadtree();
        // In order to compute triangle neighbors we have to set a unique id to points.
        triangleVertex = new Triple[geometry.getNumGeometries()];
        // Final size of tri vertex is not known at the moment. Give just an hint
        triVertex = new ArrayList<EnvelopeWithIndex>(triangleVertex.length);
        // First Loop make an index of triangle vertex
        for(int idgeom = 0; idgeom < triangleVertex.length; idgeom++) {
            Geometry geomItem = geometry.getGeometryN(idgeom);
            if(geomItem instanceof Polygon) {
                Coordinate[] coords = geomItem.getCoordinates();
                if(coords.length != 4) {
                    throw new TopologyException("Voronoi method accept only triangles");
                }
                triangleVertex[idgeom] = new Triple(getOrAppendVertex(coords[0], ptQuad),
                        getOrAppendVertex(coords[1], ptQuad),
                        getOrAppendVertex(coords[2], ptQuad));
                for(int triVertexIndex : triangleVertex[idgeom].toArray()) {
                    triVertex.get(triVertexIndex).addSharingTriangle(idgeom); // depends on control dependency: [for], data = [triVertexIndex]
                }
            } else {
                throw new TopologyException("Voronoi method accept only polygons");
            }
        }
        // Second loop make an index of triangle neighbors
        ptQuad = null;
        triangleNeighbors = new Triple[geometry.getNumGeometries()];
        for(int triId = 0; triId< triangleVertex.length; triId++) {
            Triple triangleIndex = triangleVertex[triId];
            triangleNeighbors[triId] = new Triple(commonEdge(triId,triVertex.get(triangleIndex.getB()), triVertex.get(triangleIndex.getC())),
                    commonEdge(triId,triVertex.get(triangleIndex.getA()), triVertex.get(triangleIndex.getC())),
                    commonEdge(triId,triVertex.get(triangleIndex.getB()), triVertex.get(triangleIndex.getA())));
        }
        triVertex.clear();
        return triangleNeighbors;
    }
}
public class class_name {
    /**
     * Closes every socket held in the multimap and empties it. The sockets
     * are snapshotted and the map cleared under the multimap lock, but the
     * potentially slow socket closes run outside the lock.
     */
    public void clear() {
        List<Socket> snapshot = new LinkedList<Socket>();
        synchronized(multimap) {
            snapshot.addAll(multimap.values());
            multimap.clear();
        }
        for (Socket socket : snapshot) {
            IOUtils.closeSocket(socket);
        }
    }
}
public class class_name {
    /**
     * Closes all sockets in the multimap and clears it; sockets are copied
     * out and the map emptied under the lock, while the close calls happen
     * after the lock is released.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     */
    public void clear() {
        List<Socket> socketsToClear = new LinkedList<Socket>();
        synchronized(multimap) {
            for (Socket sock : multimap.values()) {
                socketsToClear.add(sock); // depends on control dependency: [for], data = [sock]
            }
            multimap.clear();
        }
        // Close outside the synchronized block to keep lock hold time short.
        for (Socket sock : socketsToClear) {
            IOUtils.closeSocket(sock); // depends on control dependency: [for], data = [sock]
        }
    }
}
public class class_name { public void addDate(ICalDate icalDate, ICalProperty property, ICalParameters parameters) { if (!icalDate.hasTime()) { //dates don't have timezones return; } if (icalDate.getRawComponents().isUtc()) { //it's a UTC date, so it was already parsed under the correct timezone return; } //TODO handle UTC offsets within the date strings (not part of iCal standard) String tzid = parameters.getTimezoneId(); if (tzid == null) { addFloatingDate(property, icalDate); } else { addTimezonedDate(tzid, property, icalDate); } } }
public class class_name {
    /**
     * Files an iCal date under its timezone: no-op for date-only or UTC
     * values, otherwise dispatches on the TZID parameter to either the
     * timezoned or the floating-date handler.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     */
    public void addDate(ICalDate icalDate, ICalProperty property, ICalParameters parameters) {
        if (!icalDate.hasTime()) {
            //dates don't have timezones
            return; // depends on control dependency: [if], data = [none]
        }
        if (icalDate.getRawComponents().isUtc()) {
            //it's a UTC date, so it was already parsed under the correct timezone
            return; // depends on control dependency: [if], data = [none]
        }
        //TODO handle UTC offsets within the date strings (not part of iCal standard)
        String tzid = parameters.getTimezoneId();
        if (tzid == null) {
            addFloatingDate(property, icalDate); // depends on control dependency: [if], data = [none]
        } else {
            addTimezonedDate(tzid, property, icalDate); // depends on control dependency: [if], data = [(tzid]
        }
    }
}
public class class_name {
    /**
     * Writes a Silence tick range into the ordered target stream and, when
     * the stream's completed prefix advances as a result, delivers any newly
     * completed messages in order.
     *
     * Locking: takes this (target stream) first, then oststream; message
     * delivery happens after releasing the oststream lock but still under
     * this, because the batch handler callbacks need the stream lock to
     * update the completed prefix (see the deadlock comment below).
     *
     * @param tr     the tick range to silence
     * @param forced true to force-complete the range even over existing ticks
     */
    private void writeSilenceInternal(TickRange tr, boolean forced) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "writeSilenceInternal", new Object[] { tr, Boolean.valueOf(forced) });
        List msgList = null;
        // Take lock on target stream and hold it until messages have been
        // added to batch by deliverOrderedMessages
        synchronized (this)
        {
            // We are only allowed to remove a message from the stream and replace
            // it with Silence if it has not yet been added to a Batch to be delivered
            // We know that this can't happen asynchronously once we hold the targetStream lock
            // and the nextCompletedPrefix tells us the last message in the current batch
            if (!forced || tr.valuestamp > this.nextCompletedPrefix)
            {
                synchronized (oststream)
                {
                    // Get the completedPrefix
                    long completedPrefix = oststream.getCompletedPrefix();
                    // check if all ticks in Silence msg are already acked as if
                    // so they will be changed to Completed anyway and we don't need
                    // to do it now
                    if (tr.endstamp > completedPrefix)
                    {
                        if (forced == false)
                            oststream.writeRange(tr);
                        else
                            oststream.writeCompletedRangeForced(tr);
                        // Get updated completedPrefix
                        completedPrefix = oststream.getCompletedPrefix();
                    }
                    // The completedPrefix may have advanced when the Silence message
                    // was writen to the stream or both
                    if ((completedPrefix + 1) > doubtHorizon)
                    {
                        // advance the doubt horizon
                        doubtHorizon = completedPrefix + 1;
                        // now call the method which trys to advance the doubtHorizon
                        // from it's current setting by moving over any ticks in Completed
                        // state and sending Values messages until it reaches a tick in
                        // Unknown or Requested
                        msgList = advanceDoubtHorizon(null);
                    }
                    if ((doubtHorizon - 1) > unknownHorizon)
                        unknownHorizon = doubtHorizon - 1;
                    // the message was writen to the stream
                    // see if this message created a gap
                    if (tr.endstamp > unknownHorizon)
                    {
                        // check if gap created
                        if (tr.startstamp > (unknownHorizon + 1))
                        {
                            handleNewGap(unknownHorizon + 1, tr.startstamp - 1);
                        }
                        unknownHorizon = tr.endstamp;
                    }
                    // Reset the health state
                    if (lastNackTick >= 0)
                    {
                        if (tr.startstamp <= lastNackTick && tr.endstamp >= lastNackTick)
                        {
                            getControlAdapter().getHealthState().updateHealth(HealthStateListener.MSG_LOST_ERROR_STATE, HealthState.GREEN);
                            lastNackTick = -1;
                        }
                    }
                    // If the stream is blocked, see if we have a silence for the blocking tick
                    if (isStreamBlocked())
                    {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "Stream is blocked on tick: " + linkBlockingTick + ", see if we can unblock it");
                        if (tr.endstamp >= linkBlockingTick)
                        {
                            if (tr.startstamp <= linkBlockingTick)
                            {
                                // The stream is no longer blocked
                                setStreamIsBlocked(false, DestinationHandler.OUTPUT_HANDLER_FOUND, null, null);
                            }
                        }
                    } // eof isStreamBlocked()
                } // end sync ( release the oststream lock )
                // Deliver messages outside of synchronise
                // We do this because deliverOrderedMessages takes the
                // BatchHandler lock and the BatchHandler callbacks require
                // the stream lock to update the completedPrefix. If we call
                // the BatchHandler when we hold the stream lock it could cause
                // a deadlock
                if (msgList != null)
                {
                    // Call the Input or Output Handler to deliver the messages
                    try
                    {
                        deliverer.deliverOrderedMessages(msgList, this, priority, reliability);
                    } catch (SINotPossibleInCurrentConfigurationException e)
                    {
                        // No FFDC code needed
                        SibTr.exception(tc, e);
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "writeSilenceInternal", "GDException");
                        // Dont rethrow the exception. The GD protocols will handle the resend of
                        // the message
                    } catch (SIException e)
                    {
                        // FFDC
                        FFDCFilter.processException(
                                                    e,
                                                    "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream.writeSilenceInternal",
                                                    "1:960:1.110",
                                                    this);
                        SibTr.exception(tc, e);
                        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                    new Object[] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream", "1:967:1.110", e });
                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                            SibTr.exit(tc, "writeSilenceInternal", "GDException");
                        throw new SIErrorException(
                                        nls.getFormattedMessage(
                                                                "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                                                new Object[] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream", "1:977:1.110", e },
                                                                null),
                                        e);
                    }
                }
            } // else can't remove as already in batch
        } // end sync - release lock on target stream
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "writeSilenceInternal");
    }
}
public class class_name {
    /**
     * Writes a Silence tick range into the ordered target stream and, when
     * the completed prefix advances, delivers newly completed messages in
     * order (outside the oststream lock to avoid the BatchHandler deadlock
     * described inline).
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     *
     * @param tr     the tick range to silence
     * @param forced true to force-complete the range even over existing ticks
     */
    private void writeSilenceInternal(TickRange tr, boolean forced) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "writeSilenceInternal", new Object[] { tr, Boolean.valueOf(forced) });
        List msgList = null;
        // Take lock on target stream and hold it until messages have been
        // added to batch by deliverOrderedMessages
        synchronized (this)
        {
            // We are only allowed to remove a message from the stream and replace
            // it with Silence if it has not yet been added to a Batch to be delivered
            // We know that this can't happen asynchronously once we hold the targetStream lock
            // and the nextCompletedPrefix tells us the last message in the current batch
            if (!forced || tr.valuestamp > this.nextCompletedPrefix)
            {
                synchronized (oststream) // depends on control dependency: [if], data = [none]
                {
                    // Get the completedPrefix
                    long completedPrefix = oststream.getCompletedPrefix();
                    // check if all ticks in Silence msg are already acked as if
                    // so they will be changed to Completed anyway and we don't need
                    // to do it now
                    if (tr.endstamp > completedPrefix)
                    {
                        if (forced == false)
                            oststream.writeRange(tr);
                        else
                            oststream.writeCompletedRangeForced(tr);
                        // Get updated completedPrefix
                        completedPrefix = oststream.getCompletedPrefix(); // depends on control dependency: [if], data = [none]
                    }
                    // The completedPrefix may have advanced when the Silence message
                    // was writen to the stream or both
                    if ((completedPrefix + 1) > doubtHorizon)
                    {
                        // advance the doubt horizon
                        doubtHorizon = completedPrefix + 1; // depends on control dependency: [if], data = [none]
                        // now call the method which trys to advance the doubtHorizon
                        // from it's current setting by moving over any ticks in Completed
                        // state and sending Values messages until it reaches a tick in
                        // Unknown or Requested
                        msgList = advanceDoubtHorizon(null); // depends on control dependency: [if], data = [none]
                    }
                    if ((doubtHorizon - 1) > unknownHorizon)
                        unknownHorizon = doubtHorizon - 1;
                    // the message was writen to the stream
                    // see if this message created a gap
                    if (tr.endstamp > unknownHorizon)
                    {
                        // check if gap created
                        if (tr.startstamp > (unknownHorizon + 1))
                        {
                            handleNewGap(unknownHorizon + 1, tr.startstamp - 1); // depends on control dependency: [if], data = [none]
                        }
                        unknownHorizon = tr.endstamp; // depends on control dependency: [if], data = [none]
                    }
                    // Reset the health state
                    if (lastNackTick >= 0)
                    {
                        if (tr.startstamp <= lastNackTick && tr.endstamp >= lastNackTick)
                        {
                            getControlAdapter().getHealthState().updateHealth(HealthStateListener.MSG_LOST_ERROR_STATE, HealthState.GREEN); // depends on control dependency: [if], data = [none]
                            lastNackTick = -1; // depends on control dependency: [if], data = [none]
                        }
                    }
                    // If the stream is blocked, see if we have a silence for the blocking tick
                    if (isStreamBlocked())
                    {
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "Stream is blocked on tick: " + linkBlockingTick + ", see if we can unblock it");
                        if (tr.endstamp >= linkBlockingTick)
                        {
                            if (tr.startstamp <= linkBlockingTick)
                            {
                                // The stream is no longer blocked
                                setStreamIsBlocked(false, DestinationHandler.OUTPUT_HANDLER_FOUND, null, null); // depends on control dependency: [if], data = [none]
                            }
                        }
                    } // eof isStreamBlocked()
                } // end sync ( release the oststream lock )
                // Deliver messages outside of synchronise
                // We do this because deliverOrderedMessages takes the
                // BatchHandler lock and the BatchHandler callbacks require
                // the stream lock to update the completedPrefix. If we call
                // the BatchHandler when we hold the stream lock it could cause
                // a deadlock
                if (msgList != null)
                {
                    // Call the Input or Output Handler to deliver the messages
                    try
                    {
                        deliverer.deliverOrderedMessages(msgList, this, priority, reliability); // depends on control dependency: [try], data = [none]
                    } catch (SINotPossibleInCurrentConfigurationException e)
                    {
                        // No FFDC code needed
                        SibTr.exception(tc, e);
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                            SibTr.debug(tc, "writeSilenceInternal", "GDException");
                        // Dont rethrow the exception. The GD protocols will handle the resend of
                        // the message
                    } catch (SIException e) // depends on control dependency: [catch], data = [none]
                    {
                        // FFDC
                        FFDCFilter.processException(
                                                    e,
                                                    "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream.writeSilenceInternal",
                                                    "1:960:1.110",
                                                    this);
                        SibTr.exception(tc, e);
                        SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                    new Object[] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream", "1:967:1.110", e });
                        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
                            SibTr.exit(tc, "writeSilenceInternal", "GDException");
                        throw new SIErrorException(
                                        nls.getFormattedMessage(
                                                                "INTERNAL_MESSAGING_ERROR_CWSIP0002",
                                                                new Object[] { "com.ibm.ws.sib.processor.gd.GuaranteedTargetStream", "1:977:1.110", e },
                                                                null),
                                        e);
                    } // depends on control dependency: [catch], data = [none]
                }
            } // else can't remove as already in batch
        } // end sync - release lock on target stream
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "writeSilenceInternal");
    }
}
public class class_name {
    /**
     * Returns the int value stored for the given non-negative key, or the
     * supplied default when the key is absent. A map that never stored int
     * values (ivaluesShift == 0) reports 0 for keys that are present.
     */
    public int getInt(int key, int defaultValue) {
        if (key < 0) Kit.codeBug();
        int slot = findIndex(key);
        if (slot < 0) {
            return defaultValue; // key not present
        }
        return (ivaluesShift != 0) ? keys[ivaluesShift + slot] : 0;
    }
}
public class class_name {
    /**
     * Returns the int value stored for the given key, or defaultValue when
     * the key is absent; a present key in a map with no int-value storage
     * (ivaluesShift == 0) yields 0. Negative keys are flagged via Kit.codeBug().
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     */
    public int getInt(int key, int defaultValue) {
        if (key < 0) Kit.codeBug();
        int index = findIndex(key);
        if (0 <= index) {
            if (ivaluesShift != 0) {
                // int values live in the same array as keys, offset by ivaluesShift
                return keys[ivaluesShift + index]; // depends on control dependency: [if], data = [none]
            }
            return 0; // depends on control dependency: [if], data = [none]
        }
        return defaultValue;
    }
}
public class class_name {
    /**
     * Depth-first search used to predict the nonzero pattern of the solution
     * vector X for one row of B, given the sparse matrix G (optionally row
     * permuted by pinv). The xi array is dual-use: the front acts as the DFS
     * stack while reachable rows are written, in topological order, from the
     * tail downwards starting at index top-1. The w array is also dual-use:
     * w[0..N) marks visited columns and w[N..2N) stores, per stack level, how
     * far into a column's nonzero list the search has progressed.
     *
     * @param rowB seed row (nonzero row of B) to start the search from
     * @param G    sparse matrix defining the reachability graph
     * @param top  current tail index in xi; results fill xi[newTop..top)
     * @param pinv optional row permutation (null for identity)
     * @param xi   stack + output buffer (length at least 2N)
     * @param w    workspace (length at least 2N, visited flags assumed clear)
     * @return the new value of top after prepending reachable rows
     */
    private static int searchNzRowsInX_DFS(int rowB , DMatrixSparseCSC G , int top , int pinv[], int xi[], int w[] )
    {
        int N = G.numCols; // first N elements in w is the length of X
        int head = 0; // put the selected row into the FILO stack
        xi[head] = rowB; // use the head of xi to store where the stack it's searching. The tail is where
        // the graph ordered list of rows in B is stored.
        while( head >= 0 ) {
            // the column in G being examined
            int G_col = xi[head];
            int G_col_new = pinv != null ? pinv[G_col] : G_col;
            if( w[G_col] == 0) {
                w[G_col] = 1;
                // mark which child in the loop below it's examining
                w[N+head] = G_col_new < 0 || G_col_new >= N ? 0 : G.col_idx[G_col_new];
            }
            // See if there are any children which have yet to be examined
            boolean done = true;
            // The Right side after || is used to handle tall matrices. There will be no nodes matching
            int idx0 = w[N+head];
            int idx1 = G_col_new < 0 || G_col_new >= N ? 0 : G.col_idx[G_col_new+1];
            for (int j = idx0; j < idx1; j++) {
                int jrow = G.nz_rows[j];
                if( jrow < N && w[jrow] == 0 ) {
                    w[N+head] = j+1; // mark that it has processed up to this point
                    xi[++head] = jrow;
                    done = false;
                    break; // It's a DFS so break and continue down
                }
            }
            if( done ) {
                // all children visited: pop the stack and emit this column
                head--;
                xi[--top] = G_col;
            }
        }
        return top;
    }
}
public class class_name {
    /**
     * DFS that predicts the nonzero pattern of X for one row of B over the
     * graph of G (optionally permuted by pinv); reachable rows are emitted in
     * topological order into xi from index top-1 downwards, and the new top
     * is returned. xi doubles as the DFS stack; w holds visited flags in
     * [0,N) and per-level scan positions in [N,2N).
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     */
    private static int searchNzRowsInX_DFS(int rowB , DMatrixSparseCSC G , int top , int pinv[], int xi[], int w[] )
    {
        int N = G.numCols; // first N elements in w is the length of X
        int head = 0; // put the selected row into the FILO stack
        xi[head] = rowB; // use the head of xi to store where the stack it's searching. The tail is where
        // the graph ordered list of rows in B is stored.
        while( head >= 0 ) {
            // the column in G being examined
            int G_col = xi[head];
            int G_col_new = pinv != null ? pinv[G_col] : G_col;
            if( w[G_col] == 0) {
                w[G_col] = 1; // depends on control dependency: [if], data = [none]
                // mark which child in the loop below it's examining
                w[N+head] = G_col_new < 0 || G_col_new >= N ? 0 : G.col_idx[G_col_new]; // depends on control dependency: [if], data = [none]
            }
            // See if there are any children which have yet to be examined
            boolean done = true;
            // The Right side after || is used to handle tall matrices. There will be no nodes matching
            int idx0 = w[N+head];
            int idx1 = G_col_new < 0 || G_col_new >= N ? 0 : G.col_idx[G_col_new+1];
            for (int j = idx0; j < idx1; j++) {
                int jrow = G.nz_rows[j];
                if( jrow < N && w[jrow] == 0 ) {
                    w[N+head] = j+1; // mark that it has processed up to this point // depends on control dependency: [if], data = [none]
                    xi[++head] = jrow; // depends on control dependency: [if], data = [none]
                    done = false; // depends on control dependency: [if], data = [none]
                    break; // It's a DFS so break and continue down
                }
            }
            if( done ) {
                head--; // depends on control dependency: [if], data = [none]
                xi[--top] = G_col; // depends on control dependency: [if], data = [none]
            }
        }
        return top;
    }
}
public class class_name {
    /**
     * Renders the projection list as a comma-separated expression string.
     *
     * @param context substitution context applied to each path operand
     * @return the joined projection expression, or {@code null} when there
     *         are no projections (callers distinguish null from empty)
     */
    String buildProjectionExpression(SubstitutionContext context) {
        // isEmpty() instead of size() == 0: clearer intent, same semantics.
        if (projections.isEmpty()) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        for (PathOperand projection : projections) {
            if (sb.length() > 0) {
                sb.append(", ");
            }
            sb.append(projection.asSubstituted(context));
        }
        return sb.toString();
    }
}
public class class_name {
    /**
     * Renders the projection list as a comma-separated expression string, or
     * returns null when there are no projections.
     *
     * NOTE(review): the trailing "// depends on control dependency" comment
     * appears to be a machine-generated dependence annotation — kept verbatim.
     */
    String buildProjectionExpression(SubstitutionContext context) {
        if (projections.size() == 0)
            return null;
        StringBuilder sb = new StringBuilder();
        for (PathOperand projection : projections) {
            // separator goes before every element except the first
            if (sb.length() > 0)
                sb.append(", ");
            sb.append(projection.asSubstituted(context)); // depends on control dependency: [for], data = [projection]
        }
        return sb.toString();
    }
}
public class class_name {
    /**
     * Lazily creates the request message on first access (applying the
     * configured header-change limit), stamps the start time, and returns
     * the request object.
     */
    protected HttpRequestMessageImpl getRequestImpl() {
        HttpRequestMessageImpl existing = getMyRequest();
        if (existing == null) {
            // First access: build the request and configure its limits.
            setMyRequest(getObjectFactory().getRequest(this));
            getMyRequest().setHeaderChangeLimit(getHttpConfig().getHeaderChangeLimit());
        }
        setStartTime();
        return getMyRequest();
    }
}
public class class_name {
    /**
     * Lazily creates the request message on first access (with the configured
     * header-change limit), then stamps the start time and returns it.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     */
    protected HttpRequestMessageImpl getRequestImpl() {
        if (null == getMyRequest()) {
            setMyRequest(getObjectFactory().getRequest(this)); // depends on control dependency: [if], data = [none]
            getMyRequest().setHeaderChangeLimit(getHttpConfig().getHeaderChangeLimit()); // depends on control dependency: [if], data = [none]
        }
        setStartTime();
        return getMyRequest();
    }
}
public class class_name {
    /**
     * Creates a new instance of the target class via its no-arg constructor.
     *
     * Uses {@code getDeclaredConstructor().newInstance()} instead of the
     * deprecated {@code Class#newInstance()}, which silently rethrows checked
     * exceptions thrown by the constructor. All reflective failures (missing
     * or inaccessible constructor, abstract class, constructor exception) are
     * wrapped in a RuntimeException with the cause preserved, matching the
     * original contract of throwing RuntimeException on failure.
     *
     * @return a freshly constructed instance of {@code targetClass}
     * @throws RuntimeException if reflective instantiation fails
     */
    protected T createInstance() {
        try {
            return targetClass.getDeclaredConstructor().newInstance();
        } catch (ReflectiveOperationException e) {
            // ReflectiveOperationException covers Instantiation-, IllegalAccess-,
            // NoSuchMethod- and InvocationTargetException in one catch.
            throw new RuntimeException(e);
        }
    }
}
public class class_name {
    /**
     * Creates a new instance of the target class via the (deprecated)
     * Class.newInstance(), wrapping reflective failures in RuntimeException
     * with the cause preserved.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     *
     * @return a freshly constructed instance
     * @throws RuntimeException if instantiation or access fails
     */
    protected T createInstance() {
        try {
            return targetClass.newInstance(); // depends on control dependency: [try], data = [none]
        } catch (InstantiationException e) {
            throw new RuntimeException(e);
        } catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
            throw new RuntimeException(e);
        } // depends on control dependency: [catch], data = [none]
    }
}
public class class_name { public String getSrcSet() { StringBuffer result = new StringBuffer(128); if (m_srcSet != null) { int items = m_srcSet.size(); for (Map.Entry<Integer, CmsJspImageBean> entry : m_srcSet.entrySet()) { CmsJspImageBean imageBean = entry.getValue(); // append the image source result.append(imageBean.getSrcUrl()); result.append(" "); // append width result.append(imageBean.getScaler().getWidth()); result.append("w"); if (--items > 0) { result.append(", "); } } } return result.toString(); } }
public class class_name {
    /**
     * Builds the HTML "srcset" attribute value: comma-separated "URL WIDTHw"
     * entries, one per image; empty string when no source set exists.
     *
     * NOTE(review): the trailing "// depends on control dependency" comments
     * appear to be machine-generated dependence annotations — kept verbatim.
     */
    public String getSrcSet() {
        StringBuffer result = new StringBuffer(128);
        if (m_srcSet != null) {
            int items = m_srcSet.size();
            for (Map.Entry<Integer, CmsJspImageBean> entry : m_srcSet.entrySet()) {
                CmsJspImageBean imageBean = entry.getValue();
                // append the image source
                result.append(imageBean.getSrcUrl()); // depends on control dependency: [for], data = [none]
                result.append(" "); // depends on control dependency: [for], data = [none]
                // append width
                result.append(imageBean.getScaler().getWidth()); // depends on control dependency: [for], data = [none]
                result.append("w"); // depends on control dependency: [for], data = [none]
                if (--items > 0) {
                    result.append(", "); // depends on control dependency: [if], data = [none]
                }
            }
        }
        return result.toString();
    }
}
public class class_name {
    /**
     * Cancels the notification entry with the given id if the notification
     * center still tracks it; unknown ids are silently ignored.
     */
    public void cancel(int entryId) {
        NotificationEntry entry = mCenter.getEntry(ID, entryId);
        if (entry == null) {
            return; // nothing to cancel
        }
        cancel(entry);
    }
}
public class class_name {
    /**
     * Cancels the notification entry with the given id; does nothing when the
     * notification center no longer tracks that id.
     *
     * NOTE(review): the trailing "// depends on control dependency" comment
     * appears to be a machine-generated dependence annotation — kept verbatim.
     */
    public void cancel(int entryId) {
        NotificationEntry entry = mCenter.getEntry(ID, entryId);
        if (entry != null) {
            cancel(entry); // depends on control dependency: [if], data = [(entry]
        }
    }
}
public class class_name { @Override protected void checkPrimitiveValidity() { BusPrimitiveInvalidity invalidityReason = null; if (this.position == null) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.NO_STOP_POSITION, null); } else { final BusNetwork busNetwork = getBusNetwork(); if (busNetwork == null) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.STOP_NOT_IN_NETWORK, null); } else { final RoadNetwork roadNetwork = busNetwork.getRoadNetwork(); if (roadNetwork == null) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.STOP_NOT_IN_NETWORK, null); } else { final Rectangle2d bounds = roadNetwork.getBoundingBox(); if (bounds == null || !bounds.contains(this.position.getPoint())) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.OUTSIDE_MAP_BOUNDS, bounds == null ? null : bounds.toString()); } } } } setPrimitiveValidity(invalidityReason); } }
public class class_name { @Override protected void checkPrimitiveValidity() { BusPrimitiveInvalidity invalidityReason = null; if (this.position == null) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.NO_STOP_POSITION, null); // depends on control dependency: [if], data = [none] } else { final BusNetwork busNetwork = getBusNetwork(); if (busNetwork == null) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.STOP_NOT_IN_NETWORK, null); // depends on control dependency: [if], data = [none] } else { final RoadNetwork roadNetwork = busNetwork.getRoadNetwork(); if (roadNetwork == null) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.STOP_NOT_IN_NETWORK, null); // depends on control dependency: [if], data = [none] } else { final Rectangle2d bounds = roadNetwork.getBoundingBox(); if (bounds == null || !bounds.contains(this.position.getPoint())) { invalidityReason = new BusPrimitiveInvalidity( BusPrimitiveInvalidityType.OUTSIDE_MAP_BOUNDS, bounds == null ? null : bounds.toString()); // depends on control dependency: [if], data = [none] } } } } setPrimitiveValidity(invalidityReason); } }
public class class_name { static void parseArguments(String[] args) { for (AbstractH2OExtension e : extManager.getCoreExtensions()) { args = e.parseArguments(args); } parseH2OArgumentsTo(args, ARGS); } }
public class class_name { static void parseArguments(String[] args) { for (AbstractH2OExtension e : extManager.getCoreExtensions()) { args = e.parseArguments(args); // depends on control dependency: [for], data = [e] } parseH2OArgumentsTo(args, ARGS); } }
public class class_name { protected void validateValue(FacesContext context, Object value) { super.validateValue(context, value); // Skip validation if it is not necessary if (!isValid() || (value == null)) { return; } boolean doAddMessage = false; // Ensure that the values match one of the available options // Don't arrays cast to "Object[]", as we may now be using an array // of primitives Converter converter = getConverter(); for (Iterator i = getValuesIterator(value); i.hasNext(); ) { Iterator items = new SelectItemsIterator(context, this); Object currentValue = i.next(); if (!SelectUtils.matchValue(context, this, currentValue, items, converter)) { doAddMessage = true; break; } } // Ensure that if the value is noSelection and a // value is required, a message is queued if (isRequired()) { for (Iterator i = getValuesIterator(value); i.hasNext();) { Iterator items = new SelectItemsIterator(context, this); Object currentValue = i.next(); if (SelectUtils.valueIsNoSelectionOption(context, this, currentValue, items, converter)) { doAddMessage = true; break; } } } if (doAddMessage) { // Enqueue an error message if an invalid value was specified FacesMessage message = MessageFactory.getMessage(context, INVALID_MESSAGE_ID, MessageFactory.getLabel(context, this)); context.addMessage(getClientId(context), message); setValid(false); } } }
public class class_name { protected void validateValue(FacesContext context, Object value) { super.validateValue(context, value); // Skip validation if it is not necessary if (!isValid() || (value == null)) { return; // depends on control dependency: [if], data = [none] } boolean doAddMessage = false; // Ensure that the values match one of the available options // Don't arrays cast to "Object[]", as we may now be using an array // of primitives Converter converter = getConverter(); for (Iterator i = getValuesIterator(value); i.hasNext(); ) { Iterator items = new SelectItemsIterator(context, this); Object currentValue = i.next(); if (!SelectUtils.matchValue(context, this, currentValue, items, converter)) { doAddMessage = true; // depends on control dependency: [if], data = [none] break; } } // Ensure that if the value is noSelection and a // value is required, a message is queued if (isRequired()) { for (Iterator i = getValuesIterator(value); i.hasNext();) { Iterator items = new SelectItemsIterator(context, this); Object currentValue = i.next(); if (SelectUtils.valueIsNoSelectionOption(context, this, currentValue, items, converter)) { doAddMessage = true; // depends on control dependency: [if], data = [none] break; } } } if (doAddMessage) { // Enqueue an error message if an invalid value was specified FacesMessage message = MessageFactory.getMessage(context, INVALID_MESSAGE_ID, MessageFactory.getLabel(context, this)); context.addMessage(getClientId(context), message); // depends on control dependency: [if], data = [none] setValid(false); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public final synchronized void beforeCompletion() throws RemoteException { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.entry(tc, "beforeCompletion: " + StateStrs[state]); if (removed) { if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "beforeCompletion: removed"); return; } switch (state) { // These are the "normal" commit points case TX_METHOD_READY: setState(COMMITTING_OUTSIDE_METHOD); // Session Synchronization BeforeCompletion needs to be called if either // the annotation (or XML) was specified or the bean implemented // the interface and a global transaction is active. F743-25855 if (ivBeforeCompletion != null || sessionSync != null) { ContainerTx tx = container.getActiveContainerTx(); // SessionSynchronization beforeCompletion is only called for // global transactions that are NOT being rolledback... and // it is skipped if a remove method (@Remove) was called. 390657 if ((tx != null) && (tx.isTransactionGlobal()) && (!tx.getRollbackOnly()) && (tx.ivRemoveBeanO != this)) // 390657 { EJBThreadData threadData = EJSContainer.getThreadData(); threadData.pushContexts(this); try { if (ivBeforeCompletion != null) { invokeSessionSynchMethod(ivBeforeCompletion, null); } else { sessionSync.beforeCompletion(); } } finally { threadData.popContexts(); } } } break; // Could happen during async rollback (i.e. a second thread is // rolling back the tran, as the current thread is committing it. case IN_METHOD: case METHOD_READY: case AFTER_COMPLETION: // d159152 case DESTROYED: if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "beforeCompletion: asynch rollback: " + getStateName(state)); return; default: throw new InvalidBeanOStateException(getStateName(state), "TX_METHOD_READY | TX_IN_METHOD | " + "METHOD_READY | DESTROYED"); } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "beforeCompletion: " + getStateName(state)); } }
public class class_name { @Override public final synchronized void beforeCompletion() throws RemoteException { final boolean isTraceOn = TraceComponent.isAnyTracingEnabled(); if (isTraceOn && tc.isEntryEnabled()) Tr.entry(tc, "beforeCompletion: " + StateStrs[state]); if (removed) { if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "beforeCompletion: removed"); return; } switch (state) { // These are the "normal" commit points case TX_METHOD_READY: setState(COMMITTING_OUTSIDE_METHOD); // Session Synchronization BeforeCompletion needs to be called if either // the annotation (or XML) was specified or the bean implemented // the interface and a global transaction is active. F743-25855 if (ivBeforeCompletion != null || sessionSync != null) { ContainerTx tx = container.getActiveContainerTx(); // SessionSynchronization beforeCompletion is only called for // global transactions that are NOT being rolledback... and // it is skipped if a remove method (@Remove) was called. 390657 if ((tx != null) && (tx.isTransactionGlobal()) && (!tx.getRollbackOnly()) && (tx.ivRemoveBeanO != this)) // 390657 { EJBThreadData threadData = EJSContainer.getThreadData(); threadData.pushContexts(this); // depends on control dependency: [if], data = [none] try { if (ivBeforeCompletion != null) { invokeSessionSynchMethod(ivBeforeCompletion, null); // depends on control dependency: [if], data = [(ivBeforeCompletion] } else { sessionSync.beforeCompletion(); // depends on control dependency: [if], data = [none] } } finally { threadData.popContexts(); } } } break; // Could happen during async rollback (i.e. a second thread is // rolling back the tran, as the current thread is committing it. 
case IN_METHOD: case METHOD_READY: case AFTER_COMPLETION: // d159152 case DESTROYED: if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "beforeCompletion: asynch rollback: " + getStateName(state)); return; default: throw new InvalidBeanOStateException(getStateName(state), "TX_METHOD_READY | TX_IN_METHOD | " + "METHOD_READY | DESTROYED"); } if (isTraceOn && tc.isEntryEnabled()) Tr.exit(tc, "beforeCompletion: " + getStateName(state)); } }
public class class_name { public void setAssociationFilterList(java.util.Collection<AssociationFilter> associationFilterList) { if (associationFilterList == null) { this.associationFilterList = null; return; } this.associationFilterList = new com.amazonaws.internal.SdkInternalList<AssociationFilter>(associationFilterList); } }
public class class_name { public void setAssociationFilterList(java.util.Collection<AssociationFilter> associationFilterList) { if (associationFilterList == null) { this.associationFilterList = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.associationFilterList = new com.amazonaws.internal.SdkInternalList<AssociationFilter>(associationFilterList); } }
public class class_name { public synchronized boolean receive(Tree message) { // Update timestamp if (timeoutMillis > 0) { timeoutAt = System.currentTimeMillis() + timeoutMillis; } // Check sequence number long seq = message.get("seq", -1); if (seq > -1) { if (seq - 1 == prevSeq) { prevSeq = seq; } else { // Process later pool.put(seq, message); return false; } } else { prevSeq = -1; } // Process current message boolean close = processMessage(message); // Process pooled messages long nextSeq = prevSeq; while (true) { Tree nextMessage = pool.remove(++nextSeq); if (nextMessage == null) { break; } prevSeq = nextSeq; if (processMessage(nextMessage)) { close = true; } } // True = remove stream from registry return close; } }
public class class_name { public synchronized boolean receive(Tree message) { // Update timestamp if (timeoutMillis > 0) { timeoutAt = System.currentTimeMillis() + timeoutMillis; // depends on control dependency: [if], data = [none] } // Check sequence number long seq = message.get("seq", -1); if (seq > -1) { if (seq - 1 == prevSeq) { prevSeq = seq; // depends on control dependency: [if], data = [none] } else { // Process later pool.put(seq, message); // depends on control dependency: [if], data = [none] return false; // depends on control dependency: [if], data = [none] } } else { prevSeq = -1; // depends on control dependency: [if], data = [none] } // Process current message boolean close = processMessage(message); // Process pooled messages long nextSeq = prevSeq; while (true) { Tree nextMessage = pool.remove(++nextSeq); if (nextMessage == null) { break; } prevSeq = nextSeq; // depends on control dependency: [while], data = [none] if (processMessage(nextMessage)) { close = true; // depends on control dependency: [if], data = [none] } } // True = remove stream from registry return close; } }
public class class_name { public static StoreDefinition getStoreDefinitionWithName(List<StoreDefinition> storeDefs, String storeName) { StoreDefinition def = null; for(StoreDefinition storeDef: storeDefs) { if(storeDef.getName().compareTo(storeName) == 0) { def = storeDef; break; } } if(def == null) { throw new VoldemortException("Could not find store " + storeName); } return def; } }
public class class_name { public static StoreDefinition getStoreDefinitionWithName(List<StoreDefinition> storeDefs, String storeName) { StoreDefinition def = null; for(StoreDefinition storeDef: storeDefs) { if(storeDef.getName().compareTo(storeName) == 0) { def = storeDef; // depends on control dependency: [if], data = [none] break; } } if(def == null) { throw new VoldemortException("Could not find store " + storeName); } return def; } }
public class class_name { public static void simplify(Throwable e) { if (!isSimplifyStackTrace()) { return; } if (e.getCause() != null) { simplify(e.getCause()); } StackTraceElement[] trace = e.getStackTrace(); if (trace == null || trace.length == 0) { return; } List<StackTraceElement> simpleTrace = new ArrayList<StackTraceElement>( trace.length); simpleTrace.add(trace[0]); // Remove unnecessary stack trace elements. for (int i = 1; i < trace.length; i++) { if (EXCLUDED_STACK_TRACE.matcher(trace[i].getClassName()).matches()) { continue; } simpleTrace.add(trace[i]); } e.setStackTrace(simpleTrace.toArray(new StackTraceElement[simpleTrace .size()])); } }
public class class_name { public static void simplify(Throwable e) { if (!isSimplifyStackTrace()) { return; // depends on control dependency: [if], data = [none] } if (e.getCause() != null) { simplify(e.getCause()); // depends on control dependency: [if], data = [(e.getCause()] } StackTraceElement[] trace = e.getStackTrace(); if (trace == null || trace.length == 0) { return; // depends on control dependency: [if], data = [none] } List<StackTraceElement> simpleTrace = new ArrayList<StackTraceElement>( trace.length); simpleTrace.add(trace[0]); // Remove unnecessary stack trace elements. for (int i = 1; i < trace.length; i++) { if (EXCLUDED_STACK_TRACE.matcher(trace[i].getClassName()).matches()) { continue; } simpleTrace.add(trace[i]); // depends on control dependency: [for], data = [i] } e.setStackTrace(simpleTrace.toArray(new StackTraceElement[simpleTrace .size()])); } }
public class class_name { final private com.ibm.websphere.cache.CacheEntry internal_putAndGet( Object key, Object value, Object userMetaData, int priority, int timeToLive, int inactivityTime, int sharingPolicy, Object dependencyIds[], Object aliasIds[], boolean incrementRefCount, boolean skipMemoryAndWriteToDisk) throws DynamicCacheException { final String methodName = "putAndGet(..)"; ValidateUtility.objectNotNull(key, "key", value, "value"); if (skipMemoryAndWriteToDisk && !cache.getSwapToDisk()) { throw new DiskOffloadNotEnabledException("DiskOffloadNotEnabledException occurred. The disk offload feature for cache instance \"" + cache.getCacheName() + "\" is not enabled."); } if (skipMemoryAndWriteToDisk == true && sharingPolicy != EntryInfo.NOT_SHARED) { //DYNA1072W=DYNA1072W: The cache id \"{0}\" will not be replicated to other servers because \"skipMemoryWriteToDisk\" is set to true. The sharing policy will be set to \"not-shared\". Tr.warning(tc, "DYNA1072W", new Object[] { key }); sharingPolicy = EntryInfo.NOT_SHARED; } ValidateUtility.sharingPolicy(sharingPolicy); if (sharingPolicy == EntryInfo.SHARED_PUSH_PULL || sharingPolicy == EntryInfo.SHARED_PULL) { invalidate(key, true); } EntryInfo ei = entryInfoPool.allocate(key, dependencyIds, aliasIds); ei.setUserMetaData(userMetaData); ei.setPriority(priority); ei.setTimeLimit(timeToLive); ei.setSharingPolicy(sharingPolicy); ei.setInactivity(inactivityTime); CacheEntry ce = cacheEntryPool.allocate(); ce.copyMetaData(ei); ce.setValue(value); ce.setSkipMemoryAndWriteToDisk(skipMemoryAndWriteToDisk); if (value instanceof DistributedNioMapObject) { ce.useByteBuffer = true; } // only put and no get for when skipMemoryAndWriteToDisk is true ==> refcount will not increment com.ibm.websphere.cache.CacheEntry newEntry = cache.setEntry(ce, CachePerf.LOCAL, false, Cache.COORDINATE, incrementRefCount); ei.returnToPool(); try { if (cache.getCacheConfig().isDefaultCacheProvider()) { if (tc.isDebugEnabled()) { if (newEntry != 
null) { Tr.debug(tc, methodName + " " + cache.getCacheName() + " id=" + newEntry.getIdObject() + " incRefCount=" + incrementRefCount + " skipMemoryWriteToDisk=" + skipMemoryAndWriteToDisk); } } if (ce.skipMemoryAndWriteToDisk) { int errorCode = ce.skipMemoryAndWriteToDiskErrorCode; StringBuffer message = new StringBuffer(); Exception ex = null; switch (errorCode) { case HTODDynacache.NO_EXCEPTION: break; case HTODDynacache.DISK_EXCEPTION: message.append("The disk IO exception has occurred when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); ex = cache.getDiskCacheException(); if (ex != null) { message.append(ex.getMessage()); } throw new DiskIOException(message.toString()); case HTODDynacache.DISK_SIZE_OVER_LIMIT_EXCEPTION: message.append("Exception has occurred either (1) there is no disk space available, or (2) the disk cache size in GB s over the (diskCacheSizeInGB) limit when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); ex = cache.getDiskCacheException(); if (ex != null) { message.append(ex.getMessage()); } throw new DiskSizeOverLimitException(message.toString()); case HTODDynacache.OTHER_EXCEPTION: message.append("The runtime exception other than a Disk IOException has occurred when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); ex = cache.getDiskCacheException(); if (ex != null) { message.append(ex.getMessage()); } throw new MiscellaneousException(message.toString()); case HTODDynacache.SERIALIZATION_EXCEPTION: message.append("The serialization exception has occurred when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); throw new SerializationException(message.toString()); case HTODDynacache.DISK_SIZE_IN_ENTRIES_OVER_LIMIT_EXCEPTION: message.append("The disk cache size in entries is over the (diskCacheSize) limit when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. 
"); throw new DiskSizeInEntriesOverLimitException(message.toString()); case HTODDynacache.DISK_CACHE_ENTRY_SIZE_OVER_LIMIT_EXCEPTION: message.append("The cache entry size is over the configured disk cache entry size (diskCacheEntrySizeInMB) limit when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); throw new DiskCacheEntrySizeOverLimitException(message.toString()); } } } } finally { ce.value = ce.serializedValue = null; //prevent ce.reset from Nio object release ce.returnToPool(); } return newEntry; } }
public class class_name { final private com.ibm.websphere.cache.CacheEntry internal_putAndGet( Object key, Object value, Object userMetaData, int priority, int timeToLive, int inactivityTime, int sharingPolicy, Object dependencyIds[], Object aliasIds[], boolean incrementRefCount, boolean skipMemoryAndWriteToDisk) throws DynamicCacheException { final String methodName = "putAndGet(..)"; ValidateUtility.objectNotNull(key, "key", value, "value"); if (skipMemoryAndWriteToDisk && !cache.getSwapToDisk()) { throw new DiskOffloadNotEnabledException("DiskOffloadNotEnabledException occurred. The disk offload feature for cache instance \"" + cache.getCacheName() + "\" is not enabled."); } if (skipMemoryAndWriteToDisk == true && sharingPolicy != EntryInfo.NOT_SHARED) { //DYNA1072W=DYNA1072W: The cache id \"{0}\" will not be replicated to other servers because \"skipMemoryWriteToDisk\" is set to true. The sharing policy will be set to \"not-shared\". Tr.warning(tc, "DYNA1072W", new Object[] { key }); sharingPolicy = EntryInfo.NOT_SHARED; } ValidateUtility.sharingPolicy(sharingPolicy); if (sharingPolicy == EntryInfo.SHARED_PUSH_PULL || sharingPolicy == EntryInfo.SHARED_PULL) { invalidate(key, true); } EntryInfo ei = entryInfoPool.allocate(key, dependencyIds, aliasIds); ei.setUserMetaData(userMetaData); ei.setPriority(priority); ei.setTimeLimit(timeToLive); ei.setSharingPolicy(sharingPolicy); ei.setInactivity(inactivityTime); CacheEntry ce = cacheEntryPool.allocate(); ce.copyMetaData(ei); ce.setValue(value); ce.setSkipMemoryAndWriteToDisk(skipMemoryAndWriteToDisk); if (value instanceof DistributedNioMapObject) { ce.useByteBuffer = true; } // only put and no get for when skipMemoryAndWriteToDisk is true ==> refcount will not increment com.ibm.websphere.cache.CacheEntry newEntry = cache.setEntry(ce, CachePerf.LOCAL, false, Cache.COORDINATE, incrementRefCount); ei.returnToPool(); try { if (cache.getCacheConfig().isDefaultCacheProvider()) { if (tc.isDebugEnabled()) { if (newEntry != 
null) { Tr.debug(tc, methodName + " " + cache.getCacheName() + " id=" + newEntry.getIdObject() + " incRefCount=" + incrementRefCount + " skipMemoryWriteToDisk=" + skipMemoryAndWriteToDisk); // depends on control dependency: [if], data = [none] } } if (ce.skipMemoryAndWriteToDisk) { int errorCode = ce.skipMemoryAndWriteToDiskErrorCode; StringBuffer message = new StringBuffer(); Exception ex = null; switch (errorCode) { case HTODDynacache.NO_EXCEPTION: break; case HTODDynacache.DISK_EXCEPTION: message.append("The disk IO exception has occurred when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); ex = cache.getDiskCacheException(); if (ex != null) { message.append(ex.getMessage()); // depends on control dependency: [if], data = [(ex] } throw new DiskIOException(message.toString()); case HTODDynacache.DISK_SIZE_OVER_LIMIT_EXCEPTION: message.append("Exception has occurred either (1) there is no disk space available, or (2) the disk cache size in GB s over the (diskCacheSizeInGB) limit when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); ex = cache.getDiskCacheException(); if (ex != null) { message.append(ex.getMessage()); // depends on control dependency: [if], data = [(ex] } throw new DiskSizeOverLimitException(message.toString()); case HTODDynacache.OTHER_EXCEPTION: message.append("The runtime exception other than a Disk IOException has occurred when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); ex = cache.getDiskCacheException(); if (ex != null) { message.append(ex.getMessage()); // depends on control dependency: [if], data = [(ex] } throw new MiscellaneousException(message.toString()); case HTODDynacache.SERIALIZATION_EXCEPTION: message.append("The serialization exception has occurred when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. 
"); throw new SerializationException(message.toString()); case HTODDynacache.DISK_SIZE_IN_ENTRIES_OVER_LIMIT_EXCEPTION: message.append("The disk cache size in entries is over the (diskCacheSize) limit when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); throw new DiskSizeInEntriesOverLimitException(message.toString()); case HTODDynacache.DISK_CACHE_ENTRY_SIZE_OVER_LIMIT_EXCEPTION: message.append("The cache entry size is over the configured disk cache entry size (diskCacheEntrySizeInMB) limit when writing cache ID: "); message.append(ce.id); message.append(" to the disk cache. "); throw new DiskCacheEntrySizeOverLimitException(message.toString()); } } } } finally { ce.value = ce.serializedValue = null; //prevent ce.reset from Nio object release ce.returnToPool(); } return newEntry; } }
public class class_name { public static void widenDomain (HttpServletRequest req, Cookie cookie) { String server = req.getServerName(); int didx = server.indexOf("."); // if no period was found (e.g. localhost) don't set the domain if (didx == -1) { return; } // if two or more periods are found (e.g. www.domain.com) strip up to the first one if (server.indexOf(".", didx+1) != -1) { cookie.setDomain(server.substring(didx)); } else { // ...otherwise prepend a "." because we're seeing something like "domain.com" cookie.setDomain("." + server); } } }
public class class_name { public static void widenDomain (HttpServletRequest req, Cookie cookie) { String server = req.getServerName(); int didx = server.indexOf("."); // if no period was found (e.g. localhost) don't set the domain if (didx == -1) { return; // depends on control dependency: [if], data = [none] } // if two or more periods are found (e.g. www.domain.com) strip up to the first one if (server.indexOf(".", didx+1) != -1) { cookie.setDomain(server.substring(didx)); // depends on control dependency: [if], data = [none] } else { // ...otherwise prepend a "." because we're seeing something like "domain.com" cookie.setDomain("." + server); // depends on control dependency: [if], data = [none] } } }
public class class_name { public String[] toArray(String[] a) { int n = data.size(); if (a.length < n) { a = new String[n]; } for (int i = 0; i < n; i++) { a[i] = data.get(i).name; } for (int i = n; i < a.length; i++) { a[i] = null; } return a; } }
public class class_name { public String[] toArray(String[] a) { int n = data.size(); if (a.length < n) { a = new String[n]; // depends on control dependency: [if], data = [none] } for (int i = 0; i < n; i++) { a[i] = data.get(i).name; // depends on control dependency: [for], data = [i] } for (int i = n; i < a.length; i++) { a[i] = null; // depends on control dependency: [for], data = [i] } return a; } }
public class class_name { public JsonElement parse(JsonReader json) throws JsonIOException, JsonSyntaxException { boolean lenient = json.isLenient(); json.setLenient(true); try { return Streams.parse(json); } catch (StackOverflowError e) { throw new JsonParseException("Failed parsing JSON source: " + json + " to Json", e); } catch (OutOfMemoryError e) { throw new JsonParseException("Failed parsing JSON source: " + json + " to Json", e); } catch (JsonParseException e) { if (e.getCause() instanceof EOFException) { return JsonNull.INSTANCE; } throw e; } finally { json.setLenient(lenient); } } }
public class class_name { public JsonElement parse(JsonReader json) throws JsonIOException, JsonSyntaxException { boolean lenient = json.isLenient(); json.setLenient(true); try { return Streams.parse(json); } catch (StackOverflowError e) { throw new JsonParseException("Failed parsing JSON source: " + json + " to Json", e); } catch (OutOfMemoryError e) { throw new JsonParseException("Failed parsing JSON source: " + json + " to Json", e); } catch (JsonParseException e) { if (e.getCause() instanceof EOFException) { return JsonNull.INSTANCE; // depends on control dependency: [if], data = [none] } throw e; } finally { json.setLenient(lenient); } } }
public class class_name { private String getToken() { SessionData session = dataMgr.getSessionDAO().session(); if (session != null && session.getExpiresOn() > System.currentTimeMillis()) { return session.getAccessToken(); } return null; } }
public class class_name { private String getToken() { SessionData session = dataMgr.getSessionDAO().session(); if (session != null && session.getExpiresOn() > System.currentTimeMillis()) { return session.getAccessToken(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public GrailsClass addArtefact(String artefactType, GrailsClass artefactGrailsClass) { ArtefactHandler handler = artefactHandlersByName.get(artefactType); if (handler.isArtefactGrailsClass(artefactGrailsClass)) { // Store the GrailsClass in cache DefaultArtefactInfo info = getArtefactInfo(artefactType, true); info.addGrailsClass(artefactGrailsClass); info.updateComplete(); initializeArtefacts(artefactType); return artefactGrailsClass; } throw new GrailsConfigurationException("Cannot add " + artefactType + " class [" + artefactGrailsClass + "]. It is not a " + artefactType + "!"); } }
public class class_name { public GrailsClass addArtefact(String artefactType, GrailsClass artefactGrailsClass) { ArtefactHandler handler = artefactHandlersByName.get(artefactType); if (handler.isArtefactGrailsClass(artefactGrailsClass)) { // Store the GrailsClass in cache DefaultArtefactInfo info = getArtefactInfo(artefactType, true); info.addGrailsClass(artefactGrailsClass); // depends on control dependency: [if], data = [none] info.updateComplete(); // depends on control dependency: [if], data = [none] initializeArtefacts(artefactType); // depends on control dependency: [if], data = [none] return artefactGrailsClass; // depends on control dependency: [if], data = [none] } throw new GrailsConfigurationException("Cannot add " + artefactType + " class [" + artefactGrailsClass + "]. It is not a " + artefactType + "!"); } }
public class class_name { public static boolean canSetCustomIPECropRatios(@NotNull MediaRequest mediaRequest, @Nullable ComponentContext wcmComponentContext) { EditConfig editConfig = null; InplaceEditingConfig ipeConfig = null; if (wcmComponentContext != null && wcmComponentContext.getEditContext() != null && wcmComponentContext.getEditContext().getEditConfig() != null && wcmComponentContext.getResource() != null) { editConfig = wcmComponentContext.getEditContext().getEditConfig(); ipeConfig = editConfig.getInplaceEditingConfig(); } if (editConfig == null || ipeConfig == null || !StringUtils.equals(ipeConfig.getEditorType(), "image")) { // no image IPE activated - never customize crop ratios return false; } switch (mediaRequest.getMediaArgs().getIPERatioCustomize()) { case ALWAYS: return true; case NEVER: return false; case AUTO: if (StringUtils.isNotEmpty(ipeConfig.getConfigPath())) { String ratiosPath = ipeConfig.getConfigPath() + "/plugins/crop/aspectRatios"; @SuppressWarnings("null") ResourceResolver resolver = wcmComponentContext.getResource().getResourceResolver(); return resolver.getResource(ratiosPath) == null; } return true; default: throw new IllegalArgumentException("Unsupported IPE ratio customize mode: " + mediaRequest.getMediaArgs().getIPERatioCustomize()); } } }
public class class_name { public static boolean canSetCustomIPECropRatios(@NotNull MediaRequest mediaRequest, @Nullable ComponentContext wcmComponentContext) { EditConfig editConfig = null; InplaceEditingConfig ipeConfig = null; if (wcmComponentContext != null && wcmComponentContext.getEditContext() != null && wcmComponentContext.getEditContext().getEditConfig() != null && wcmComponentContext.getResource() != null) { editConfig = wcmComponentContext.getEditContext().getEditConfig(); // depends on control dependency: [if], data = [none] ipeConfig = editConfig.getInplaceEditingConfig(); // depends on control dependency: [if], data = [none] } if (editConfig == null || ipeConfig == null || !StringUtils.equals(ipeConfig.getEditorType(), "image")) { // no image IPE activated - never customize crop ratios return false; // depends on control dependency: [if], data = [none] } switch (mediaRequest.getMediaArgs().getIPERatioCustomize()) { case ALWAYS: return true; case NEVER: return false; case AUTO: if (StringUtils.isNotEmpty(ipeConfig.getConfigPath())) { String ratiosPath = ipeConfig.getConfigPath() + "/plugins/crop/aspectRatios"; @SuppressWarnings("null") ResourceResolver resolver = wcmComponentContext.getResource().getResourceResolver(); return resolver.getResource(ratiosPath) == null; // depends on control dependency: [if], data = [none] } return true; default: throw new IllegalArgumentException("Unsupported IPE ratio customize mode: " + mediaRequest.getMediaArgs().getIPERatioCustomize()); } } }
public class class_name { public static final void rotate(Structure structure, Matrix m){ AtomIterator iter = new AtomIterator(structure) ; while (iter.hasNext()) { Atom atom = iter.next() ; rotate(atom,m); } } }
public class class_name { public static final void rotate(Structure structure, Matrix m){ AtomIterator iter = new AtomIterator(structure) ; while (iter.hasNext()) { Atom atom = iter.next() ; rotate(atom,m); // depends on control dependency: [while], data = [none] } } }
public class class_name { static int toInt(String input, int offset, int length) { if (length == 1) { return input.charAt(offset) - '0'; } int out = 0; for (int i = offset + length - 1, factor = 1; i >= offset; --i, factor *= 10) { out += (input.charAt(i) - '0') * factor; } return out; } }
public class class_name { static int toInt(String input, int offset, int length) { if (length == 1) { return input.charAt(offset) - '0'; // depends on control dependency: [if], data = [none] } int out = 0; for (int i = offset + length - 1, factor = 1; i >= offset; --i, factor *= 10) { out += (input.charAt(i) - '0') * factor; // depends on control dependency: [for], data = [i] } return out; } }
public class class_name { public static <T> T navigateStrict( final Object source, final Object... paths ) throws UnsupportedOperationException { Object destination = source; for ( Object path : paths ) { if ( path == null ) { throw new UnsupportedOperationException("path is null"); } if ( destination == null ) { throw new UnsupportedOperationException("source is null"); } if ( destination instanceof Map ) { Map temp = (Map) destination; if (temp.containsKey( path ) ) { // if we don't check for containsKey first, then the Map.get call // would return null for keys that don't actually exist. destination = ((Map) destination).get(path); } else { throw new UnsupportedOperationException("no entry for '" + path + "' found while traversing the JSON"); } } else if ( destination instanceof List ) { if ( ! (path instanceof Integer) ) { throw new UnsupportedOperationException( "path '" + path + "' is trying to be used as an array index"); } List destList = (List) destination; int pathInt = (Integer) path; if ( pathInt < 0 || pathInt > destList.size() ) { throw new UnsupportedOperationException( "path '" + path + "' is negative or outside the range of the list"); } destination = destList.get( pathInt ); } else { throw new UnsupportedOperationException("Navigation supports only Map and List source types and non-null String and Integer path types"); } } return cast(destination); } }
public class class_name { public static <T> T navigateStrict( final Object source, final Object... paths ) throws UnsupportedOperationException { Object destination = source; for ( Object path : paths ) { if ( path == null ) { throw new UnsupportedOperationException("path is null"); } if ( destination == null ) { throw new UnsupportedOperationException("source is null"); } if ( destination instanceof Map ) { Map temp = (Map) destination; if (temp.containsKey( path ) ) { // if we don't check for containsKey first, then the Map.get call // would return null for keys that don't actually exist. destination = ((Map) destination).get(path); // depends on control dependency: [if], data = [none] } else { throw new UnsupportedOperationException("no entry for '" + path + "' found while traversing the JSON"); } } else if ( destination instanceof List ) { if ( ! (path instanceof Integer) ) { throw new UnsupportedOperationException( "path '" + path + "' is trying to be used as an array index"); } List destList = (List) destination; int pathInt = (Integer) path; if ( pathInt < 0 || pathInt > destList.size() ) { throw new UnsupportedOperationException( "path '" + path + "' is negative or outside the range of the list"); } destination = destList.get( pathInt ); // depends on control dependency: [if], data = [none] } else { throw new UnsupportedOperationException("Navigation supports only Map and List source types and non-null String and Integer path types"); } } return cast(destination); } }
public class class_name { public void generateCucumberITFiles(final File outputDirectory, final Collection<File> featureFiles) throws MojoExecutionException { Parser<GherkinDocument> parser = new Parser<GherkinDocument>(new AstBuilder()); TagPredicate tagPredicate = new TagPredicate(overriddenParameters.getTags()); TokenMatcher matcher = new TokenMatcher(); for (final File file : featureFiles) { GherkinDocument gherkinDocument = null; final List<Pickle> acceptedPickles = new ArrayList<Pickle>(); try { String source = FileUtils.readFileToString(file); gherkinDocument = parser.parse(source, matcher); Compiler compiler = new Compiler(); List<Pickle> pickles = compiler.compile(gherkinDocument); for (Pickle pickle : pickles) { if (tagPredicate.apply(pickle.getTags())) { acceptedPickles.add(pickle); } } } catch (final IOException e) { // should never happen // TODO - proper logging System.out.println(format("WARNING: Failed to parse '%s'...IGNORING", file.getName())); } for (Pickle pickle : acceptedPickles) { int locationIndex = pickle.getLocations().size(); final Location location = findLocationByIndex(pickle, 0); //Scenario Outline has a first location the position on the table //and second one is the position of scenario self. final Location locationToCompare = findLocationByIndex(pickle, locationIndex - 1); outputFileName = classNamingScheme.generate(file.getName()); setFeatureFileLocation(file, location); setParsedFeature(gherkinDocument.getFeature()); setParsedScenario(findScenarioDefinitionViaLocation(locationToCompare, gherkinDocument)); writeFile(outputDirectory); } } } }
public class class_name { public void generateCucumberITFiles(final File outputDirectory, final Collection<File> featureFiles) throws MojoExecutionException { Parser<GherkinDocument> parser = new Parser<GherkinDocument>(new AstBuilder()); TagPredicate tagPredicate = new TagPredicate(overriddenParameters.getTags()); TokenMatcher matcher = new TokenMatcher(); for (final File file : featureFiles) { GherkinDocument gherkinDocument = null; final List<Pickle> acceptedPickles = new ArrayList<Pickle>(); try { String source = FileUtils.readFileToString(file); gherkinDocument = parser.parse(source, matcher); // depends on control dependency: [try], data = [none] Compiler compiler = new Compiler(); List<Pickle> pickles = compiler.compile(gherkinDocument); for (Pickle pickle : pickles) { if (tagPredicate.apply(pickle.getTags())) { acceptedPickles.add(pickle); // depends on control dependency: [if], data = [none] } } } catch (final IOException e) { // should never happen // TODO - proper logging System.out.println(format("WARNING: Failed to parse '%s'...IGNORING", file.getName())); } // depends on control dependency: [catch], data = [none] for (Pickle pickle : acceptedPickles) { int locationIndex = pickle.getLocations().size(); final Location location = findLocationByIndex(pickle, 0); //Scenario Outline has a first location the position on the table //and second one is the position of scenario self. 
final Location locationToCompare = findLocationByIndex(pickle, locationIndex - 1); outputFileName = classNamingScheme.generate(file.getName()); // depends on control dependency: [for], data = [none] setFeatureFileLocation(file, location); // depends on control dependency: [for], data = [none] setParsedFeature(gherkinDocument.getFeature()); // depends on control dependency: [for], data = [none] setParsedScenario(findScenarioDefinitionViaLocation(locationToCompare, gherkinDocument)); // depends on control dependency: [for], data = [none] writeFile(outputDirectory); // depends on control dependency: [for], data = [none] } } } }
public class class_name { public static boolean hasHDFSDelegationToken() throws Exception { UserGroupInformation loginUser = UserGroupInformation.getCurrentUser(); Collection<Token<? extends TokenIdentifier>> usrTok = loginUser.getTokens(); for (Token<? extends TokenIdentifier> token : usrTok) { if (token.getKind().equals(HDFS_DELEGATION_TOKEN_KIND)) { return true; } } return false; } }
public class class_name { public static boolean hasHDFSDelegationToken() throws Exception { UserGroupInformation loginUser = UserGroupInformation.getCurrentUser(); Collection<Token<? extends TokenIdentifier>> usrTok = loginUser.getTokens(); for (Token<? extends TokenIdentifier> token : usrTok) { if (token.getKind().equals(HDFS_DELEGATION_TOKEN_KIND)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public static <T> Supplier<T> memoize(final Supplier<T> supplier) { return new Supplier<T>() { private volatile boolean initialized = false; private T instance = null; @Override public T get() { if (initialized == false) { synchronized (this) { if (initialized == false) { instance = supplier.get(); } } } return instance; } }; } }
public class class_name { public static <T> Supplier<T> memoize(final Supplier<T> supplier) { return new Supplier<T>() { private volatile boolean initialized = false; private T instance = null; @Override public T get() { if (initialized == false) { synchronized (this) { // depends on control dependency: [if], data = [none] if (initialized == false) { instance = supplier.get(); // depends on control dependency: [if], data = [none] } } } return instance; } }; } }
public class class_name { @EventListener(classes = CancelTargetAssignmentEvent.class) protected void targetCancelAssignmentToDistributionSet(final CancelTargetAssignmentEvent cancelEvent) { if (isNotFromSelf(cancelEvent)) { return; } sendCancelMessageToTarget(cancelEvent.getTenant(), cancelEvent.getEntity().getControllerId(), cancelEvent.getActionId(), cancelEvent.getEntity().getAddress()); } }
public class class_name { @EventListener(classes = CancelTargetAssignmentEvent.class) protected void targetCancelAssignmentToDistributionSet(final CancelTargetAssignmentEvent cancelEvent) { if (isNotFromSelf(cancelEvent)) { return; // depends on control dependency: [if], data = [none] } sendCancelMessageToTarget(cancelEvent.getTenant(), cancelEvent.getEntity().getControllerId(), cancelEvent.getActionId(), cancelEvent.getEntity().getAddress()); } }
public class class_name { private void handleBody(HttpRequestBase httpRequest) { if (hasBody()) { HttpEntityEnclosingRequestBase rq = (HttpEntityEnclosingRequestBase) httpRequest; if (!formParams.isEmpty()) { handleFormParameters(rq); } else if (body != null) { handleRequestBody(rq); } } } }
public class class_name { private void handleBody(HttpRequestBase httpRequest) { if (hasBody()) { HttpEntityEnclosingRequestBase rq = (HttpEntityEnclosingRequestBase) httpRequest; if (!formParams.isEmpty()) { handleFormParameters(rq); // depends on control dependency: [if], data = [none] } else if (body != null) { handleRequestBody(rq); // depends on control dependency: [if], data = [none] } } } }
public class class_name { private static boolean multiPointWithinEnvelope_(MultiPoint multipoint_a, Envelope envelope_b, double tolerance, ProgressTracker progress_tracker) { Envelope2D env_a = new Envelope2D(), env_b = new Envelope2D(); multipoint_a.queryEnvelope2D(env_a); envelope_b.queryEnvelope2D(env_b); if (!envelopeInfContainsEnvelope_(env_b, env_a, tolerance)) return false; if (env_b.getHeight() <= tolerance && env_b.getWidth() <= tolerance) return envelopeEqualsEnvelope_(env_a, env_b, tolerance, progress_tracker); // treat as point if (env_b.getHeight() <= tolerance || env_b.getWidth() <= tolerance) {// treat // as // line boolean b_interior = false; Envelope2D env_b_deflated = new Envelope2D(), env_b_inflated = new Envelope2D(); env_b_deflated.setCoords(env_b); env_b_inflated.setCoords(env_b); if (env_b.getHeight() > tolerance) env_b_deflated.inflate(0, -tolerance); else env_b_deflated.inflate(-tolerance, 0); env_b_inflated.inflate(tolerance, tolerance); Point2D pt_a = new Point2D(); for (int i = 0; i < multipoint_a.getPointCount(); i++) { multipoint_a.getXY(i, pt_a); if (!env_b_inflated.contains(pt_a)) return false; if (env_b.getHeight() > tolerance) { if (pt_a.y > env_b_deflated.ymin && pt_a.y < env_b_deflated.ymax) b_interior = true; } else { if (pt_a.x > env_b_deflated.xmin && pt_a.x < env_b_deflated.xmax) b_interior = true; } } return b_interior; } // treat as area boolean b_interior = false; Envelope2D env_b_deflated = new Envelope2D(), env_b_inflated = new Envelope2D(); env_b_deflated.setCoords(env_b); env_b_inflated.setCoords(env_b); env_b_deflated.inflate(-tolerance, -tolerance); env_b_inflated.inflate(tolerance, tolerance); Point2D pt_a = new Point2D(); // we loop to find a proper interior intersection (i.e. 
something inside // instead of just on the boundary) for (int i = 0; i < multipoint_a.getPointCount(); i++) { multipoint_a.getXY(i, pt_a); if (!env_b_inflated.contains(pt_a)) return false; if (env_b_deflated.containsExclusive(pt_a)) b_interior = true; } return b_interior; } }
public class class_name { private static boolean multiPointWithinEnvelope_(MultiPoint multipoint_a, Envelope envelope_b, double tolerance, ProgressTracker progress_tracker) { Envelope2D env_a = new Envelope2D(), env_b = new Envelope2D(); multipoint_a.queryEnvelope2D(env_a); envelope_b.queryEnvelope2D(env_b); if (!envelopeInfContainsEnvelope_(env_b, env_a, tolerance)) return false; if (env_b.getHeight() <= tolerance && env_b.getWidth() <= tolerance) return envelopeEqualsEnvelope_(env_a, env_b, tolerance, progress_tracker); // treat as point if (env_b.getHeight() <= tolerance || env_b.getWidth() <= tolerance) {// treat // as // line boolean b_interior = false; Envelope2D env_b_deflated = new Envelope2D(), env_b_inflated = new Envelope2D(); env_b_deflated.setCoords(env_b); // depends on control dependency: [if], data = [none] env_b_inflated.setCoords(env_b); // depends on control dependency: [if], data = [none] if (env_b.getHeight() > tolerance) env_b_deflated.inflate(0, -tolerance); else env_b_deflated.inflate(-tolerance, 0); env_b_inflated.inflate(tolerance, tolerance); // depends on control dependency: [if], data = [none] Point2D pt_a = new Point2D(); for (int i = 0; i < multipoint_a.getPointCount(); i++) { multipoint_a.getXY(i, pt_a); // depends on control dependency: [for], data = [i] if (!env_b_inflated.contains(pt_a)) return false; if (env_b.getHeight() > tolerance) { if (pt_a.y > env_b_deflated.ymin && pt_a.y < env_b_deflated.ymax) b_interior = true; } else { if (pt_a.x > env_b_deflated.xmin && pt_a.x < env_b_deflated.xmax) b_interior = true; } } return b_interior; // depends on control dependency: [if], data = [none] } // treat as area boolean b_interior = false; Envelope2D env_b_deflated = new Envelope2D(), env_b_inflated = new Envelope2D(); env_b_deflated.setCoords(env_b); env_b_inflated.setCoords(env_b); env_b_deflated.inflate(-tolerance, -tolerance); env_b_inflated.inflate(tolerance, tolerance); Point2D pt_a = new Point2D(); // we loop to find a proper 
interior intersection (i.e. something inside // instead of just on the boundary) for (int i = 0; i < multipoint_a.getPointCount(); i++) { multipoint_a.getXY(i, pt_a); // depends on control dependency: [for], data = [i] if (!env_b_inflated.contains(pt_a)) return false; if (env_b_deflated.containsExclusive(pt_a)) b_interior = true; } return b_interior; } }
public class class_name { public GeometryIndex populate(TableIndex tableIndex, long geomId, GeometryEnvelope envelope) { GeometryIndex geometryIndex = new GeometryIndex(); geometryIndex.setTableIndex(tableIndex); geometryIndex.setGeomId(geomId); geometryIndex.setMinX(envelope.getMinX()); geometryIndex.setMaxX(envelope.getMaxX()); geometryIndex.setMinY(envelope.getMinY()); geometryIndex.setMaxY(envelope.getMaxY()); if (envelope.hasZ()) { geometryIndex.setMinZ(envelope.getMinZ()); geometryIndex.setMaxZ(envelope.getMaxZ()); } if (envelope.hasM()) { geometryIndex.setMinM(envelope.getMinM()); geometryIndex.setMaxM(envelope.getMaxM()); } return geometryIndex; } }
public class class_name { public GeometryIndex populate(TableIndex tableIndex, long geomId, GeometryEnvelope envelope) { GeometryIndex geometryIndex = new GeometryIndex(); geometryIndex.setTableIndex(tableIndex); geometryIndex.setGeomId(geomId); geometryIndex.setMinX(envelope.getMinX()); geometryIndex.setMaxX(envelope.getMaxX()); geometryIndex.setMinY(envelope.getMinY()); geometryIndex.setMaxY(envelope.getMaxY()); if (envelope.hasZ()) { geometryIndex.setMinZ(envelope.getMinZ()); // depends on control dependency: [if], data = [none] geometryIndex.setMaxZ(envelope.getMaxZ()); // depends on control dependency: [if], data = [none] } if (envelope.hasM()) { geometryIndex.setMinM(envelope.getMinM()); // depends on control dependency: [if], data = [none] geometryIndex.setMaxM(envelope.getMaxM()); // depends on control dependency: [if], data = [none] } return geometryIndex; } }
public class class_name { @Override protected final void acknowledgeIDs(long checkpointId, Set<UId> uniqueIds) { LOG.debug("Acknowledging ids for checkpoint {}", checkpointId); Iterator<Tuple2<Long, List<SessionId>>> iterator = sessionIdsPerSnapshot.iterator(); while (iterator.hasNext()) { final Tuple2<Long, List<SessionId>> next = iterator.next(); long id = next.f0; if (id <= checkpointId) { acknowledgeSessionIDs(next.f1); // remove ids for this session iterator.remove(); } } } }
public class class_name { @Override protected final void acknowledgeIDs(long checkpointId, Set<UId> uniqueIds) { LOG.debug("Acknowledging ids for checkpoint {}", checkpointId); Iterator<Tuple2<Long, List<SessionId>>> iterator = sessionIdsPerSnapshot.iterator(); while (iterator.hasNext()) { final Tuple2<Long, List<SessionId>> next = iterator.next(); long id = next.f0; if (id <= checkpointId) { acknowledgeSessionIDs(next.f1); // depends on control dependency: [if], data = [none] // remove ids for this session iterator.remove(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { private void addEPStatementListener(UpdateListener listener) { if (this.subscriber == null) { if (epStatement != null) { epStatement.addListener(listener); } } } }
public class class_name { private void addEPStatementListener(UpdateListener listener) { if (this.subscriber == null) { if (epStatement != null) { epStatement.addListener(listener); // depends on control dependency: [if], data = [none] } } } }
public class class_name { private static FileInputStream openFileRead(String dirName, String fullName, String firstPart, String secondPart) { try { return new FileInputStream(new File(dirName, fullName)); } catch (FileNotFoundException ex1) { // If file name is too long hash it and try again. String message = ex1.getMessage(); if (message != null && message.contains("File name too long")) { if (secondPart != null) { long hashedSecondPart = Hasher.hashString(secondPart); fullName = firstPart + "." + Long.toString(hashedSecondPart); // Note that we pass fullName as the second argument too. return openFileRead(dirName, fullName, fullName, null); } else if (firstPart != null) { long hashedFirstPart = Hasher.hashString(firstPart); fullName = Long.toString(hashedFirstPart); return openFileRead(dirName, fullName, null, null); } else { // No hope. Log.w("Could not open file for reading (name too long) " + fullName); } } return null; } } }
public class class_name { private static FileInputStream openFileRead(String dirName, String fullName, String firstPart, String secondPart) { try { return new FileInputStream(new File(dirName, fullName)); // depends on control dependency: [try], data = [none] } catch (FileNotFoundException ex1) { // If file name is too long hash it and try again. String message = ex1.getMessage(); if (message != null && message.contains("File name too long")) { if (secondPart != null) { long hashedSecondPart = Hasher.hashString(secondPart); fullName = firstPart + "." + Long.toString(hashedSecondPart); // depends on control dependency: [if], data = [none] // Note that we pass fullName as the second argument too. return openFileRead(dirName, fullName, fullName, null); // depends on control dependency: [if], data = [null)] } else if (firstPart != null) { long hashedFirstPart = Hasher.hashString(firstPart); fullName = Long.toString(hashedFirstPart); // depends on control dependency: [if], data = [none] return openFileRead(dirName, fullName, null, null); // depends on control dependency: [if], data = [null)] } else { // No hope. Log.w("Could not open file for reading (name too long) " + fullName); // depends on control dependency: [if], data = [none] } } return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public boolean shouldReplace(MethodMember other) { if (!name.equals(other.name)) { return false; } if (!descriptor.equals(other.descriptor)) { return false; } return true; } }
public class class_name { public boolean shouldReplace(MethodMember other) { if (!name.equals(other.name)) { return false; // depends on control dependency: [if], data = [none] } if (!descriptor.equals(other.descriptor)) { return false; // depends on control dependency: [if], data = [none] } return true; } }
public class class_name { public long transferTo(long pos, long count, WritableByteChannel dest) throws IOException { long bytesToRead = bytesToRead(pos, count); if (bytesToRead > 0) { long remaining = bytesToRead; int blockIndex = blockIndex(pos); byte[] block = blocks[blockIndex]; int off = offsetInBlock(pos); ByteBuffer buf = ByteBuffer.wrap(block, off, length(off, remaining)); while (buf.hasRemaining()) { remaining -= dest.write(buf); } buf.clear(); while (remaining > 0) { int index = ++blockIndex; block = blocks[index]; buf = ByteBuffer.wrap(block, 0, length(remaining)); while (buf.hasRemaining()) { remaining -= dest.write(buf); } buf.clear(); } } return Math.max(bytesToRead, 0); // don't return -1 for this method } }
public class class_name { public long transferTo(long pos, long count, WritableByteChannel dest) throws IOException { long bytesToRead = bytesToRead(pos, count); if (bytesToRead > 0) { long remaining = bytesToRead; int blockIndex = blockIndex(pos); byte[] block = blocks[blockIndex]; int off = offsetInBlock(pos); ByteBuffer buf = ByteBuffer.wrap(block, off, length(off, remaining)); while (buf.hasRemaining()) { remaining -= dest.write(buf); // depends on control dependency: [while], data = [none] } buf.clear(); while (remaining > 0) { int index = ++blockIndex; block = blocks[index]; // depends on control dependency: [while], data = [none] buf = ByteBuffer.wrap(block, 0, length(remaining)); // depends on control dependency: [while], data = [(remaining] while (buf.hasRemaining()) { remaining -= dest.write(buf); // depends on control dependency: [while], data = [none] } buf.clear(); // depends on control dependency: [while], data = [none] } } return Math.max(bytesToRead, 0); // don't return -1 for this method } }
public class class_name { public List<JAXBElement<Object>> get_GenericApplicationPropertyOfBridge() { if (_GenericApplicationPropertyOfBridge == null) { _GenericApplicationPropertyOfBridge = new ArrayList<JAXBElement<Object>>(); } return this._GenericApplicationPropertyOfBridge; } }
public class class_name { public List<JAXBElement<Object>> get_GenericApplicationPropertyOfBridge() { if (_GenericApplicationPropertyOfBridge == null) { _GenericApplicationPropertyOfBridge = new ArrayList<JAXBElement<Object>>(); // depends on control dependency: [if], data = [none] } return this._GenericApplicationPropertyOfBridge; } }
public class class_name { public void setIcon(String iconClasses, String detailIconClasses) { m_iconPanel.setVisible(true); HTML iconWidget = new HTML(); m_iconPanel.setWidget(iconWidget); iconWidget.setStyleName(iconClasses + " " + m_fixedIconClasses); // render the detail icon as an overlay above the main icon, if required if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(detailIconClasses)) { iconWidget.setHTML( "<span class=\"" + detailIconClasses + " " + I_CmsLayoutBundle.INSTANCE.listItemWidgetCss().pageDetailType() + "\"></span>"); } } }
public class class_name { public void setIcon(String iconClasses, String detailIconClasses) { m_iconPanel.setVisible(true); HTML iconWidget = new HTML(); m_iconPanel.setWidget(iconWidget); iconWidget.setStyleName(iconClasses + " " + m_fixedIconClasses); // render the detail icon as an overlay above the main icon, if required if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(detailIconClasses)) { iconWidget.setHTML( "<span class=\"" + detailIconClasses + " " + I_CmsLayoutBundle.INSTANCE.listItemWidgetCss().pageDetailType() + "\"></span>"); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override protected E load() { context = BundleReference.class.cast(serviceType.getClassLoader()).getBundle().getBundleContext(); ServiceReference<T> ref = context.getServiceReference(serviceType); try{ if(ref == null){ return null; } T service = context.getService(ref); return doLoad(service); }catch(Exception e){ LOGGER.error("Could not load object from service. Returning null. Service called: "+serviceType+" from bundle "+context.getBundle().getSymbolicName(), e); return null; }finally{ if(ref != null) context.ungetService(ref); } } }
public class class_name { @Override protected E load() { context = BundleReference.class.cast(serviceType.getClassLoader()).getBundle().getBundleContext(); ServiceReference<T> ref = context.getServiceReference(serviceType); try{ if(ref == null){ return null; // depends on control dependency: [if], data = [none] } T service = context.getService(ref); return doLoad(service); // depends on control dependency: [try], data = [none] }catch(Exception e){ LOGGER.error("Could not load object from service. Returning null. Service called: "+serviceType+" from bundle "+context.getBundle().getSymbolicName(), e); return null; }finally{ // depends on control dependency: [catch], data = [none] if(ref != null) context.ungetService(ref); } } }
public class class_name { public void marshall(DeclineInvitationsRequest declineInvitationsRequest, ProtocolMarshaller protocolMarshaller) { if (declineInvitationsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(declineInvitationsRequest.getAccountIds(), ACCOUNTIDS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DeclineInvitationsRequest declineInvitationsRequest, ProtocolMarshaller protocolMarshaller) { if (declineInvitationsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(declineInvitationsRequest.getAccountIds(), ACCOUNTIDS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @SuppressWarnings("unchecked") protected String[] getUserRoles(SOAPMessageContext context) { HttpServletRequest request = (HttpServletRequest) context .get(SOAPMessageContext.SERVLET_REQUEST); Map<String, Set<String>> reqAttr = null; reqAttr = (Map<String, Set<String>>) request .getAttribute("FEDORA_AUX_SUBJECT_ATTRIBUTES"); if (reqAttr == null) { return null; } Set<String> fedoraRoles = reqAttr.get("fedoraRole"); if (fedoraRoles == null || fedoraRoles.size() == 0) { return null; } String[] fedoraRole = fedoraRoles.toArray(new String[fedoraRoles.size()]); return fedoraRole; } }
public class class_name { @SuppressWarnings("unchecked") protected String[] getUserRoles(SOAPMessageContext context) { HttpServletRequest request = (HttpServletRequest) context .get(SOAPMessageContext.SERVLET_REQUEST); Map<String, Set<String>> reqAttr = null; reqAttr = (Map<String, Set<String>>) request .getAttribute("FEDORA_AUX_SUBJECT_ATTRIBUTES"); if (reqAttr == null) { return null; // depends on control dependency: [if], data = [none] } Set<String> fedoraRoles = reqAttr.get("fedoraRole"); if (fedoraRoles == null || fedoraRoles.size() == 0) { return null; // depends on control dependency: [if], data = [none] } String[] fedoraRole = fedoraRoles.toArray(new String[fedoraRoles.size()]); return fedoraRole; } }
public class class_name { @Override public void doSessionCreated() throws Exception { // establish the user principals. // XXX There's a question about what to do if they are changed on revalidate Set<Class<Principal>> userPrincipalClasses = serviceManagementBean.getUserPrincipalClasses(); if (userPrincipalClasses != null && !userPrincipalClasses.isEmpty()) { Map<String, String> userPrincipals = new HashMap<>(); Subject subject = session.getSubject(); if (subject != null) { Set<Principal> principals = subject.getPrincipals(); for (Principal principal : principals) { String principalName = principal.getName(); for (Class<Principal> userPrincipal : userPrincipalClasses) { if (userPrincipal.isInstance(principal)) { userPrincipals.put(principalName, principal.getClass().getName()); } } } // The following MUST run ON the IO thread. setUserPrincipals(userPrincipals); } } } }
public class class_name { @Override public void doSessionCreated() throws Exception { // establish the user principals. // XXX There's a question about what to do if they are changed on revalidate Set<Class<Principal>> userPrincipalClasses = serviceManagementBean.getUserPrincipalClasses(); if (userPrincipalClasses != null && !userPrincipalClasses.isEmpty()) { Map<String, String> userPrincipals = new HashMap<>(); Subject subject = session.getSubject(); if (subject != null) { Set<Principal> principals = subject.getPrincipals(); for (Principal principal : principals) { String principalName = principal.getName(); for (Class<Principal> userPrincipal : userPrincipalClasses) { if (userPrincipal.isInstance(principal)) { userPrincipals.put(principalName, principal.getClass().getName()); // depends on control dependency: [if], data = [none] } } } // The following MUST run ON the IO thread. setUserPrincipals(userPrincipals); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static BigExcelWriter getBigWriter(File destFile) { try { return new BigExcelWriter(destFile); } catch (NoClassDefFoundError e) { throw new DependencyException(ObjectUtil.defaultIfNull(e.getCause(), e), PoiChecker.NO_POI_ERROR_MSG); } } }
public class class_name { public static BigExcelWriter getBigWriter(File destFile) { try { return new BigExcelWriter(destFile); // depends on control dependency: [try], data = [none] } catch (NoClassDefFoundError e) { throw new DependencyException(ObjectUtil.defaultIfNull(e.getCause(), e), PoiChecker.NO_POI_ERROR_MSG); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private int correctForExpungedMessages(int absoluteMsn) { int correctedMsn = absoluteMsn; // Loop through the expunged list backwards, adjusting the msn as we go. for (int i = expungedMsns.size() - 1; i >= 0; i--) { int expunged = expungedMsns.get(i); if (expunged <= absoluteMsn) { correctedMsn++; } } return correctedMsn; } }
public class class_name { private int correctForExpungedMessages(int absoluteMsn) { int correctedMsn = absoluteMsn; // Loop through the expunged list backwards, adjusting the msn as we go. for (int i = expungedMsns.size() - 1; i >= 0; i--) { int expunged = expungedMsns.get(i); if (expunged <= absoluteMsn) { correctedMsn++; // depends on control dependency: [if], data = [none] } } return correctedMsn; } }
public class class_name { public void setProcessPid(java.util.Collection<NumberFilter> processPid) { if (processPid == null) { this.processPid = null; return; } this.processPid = new java.util.ArrayList<NumberFilter>(processPid); } }
public class class_name { public void setProcessPid(java.util.Collection<NumberFilter> processPid) { if (processPid == null) { this.processPid = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.processPid = new java.util.ArrayList<NumberFilter>(processPid); } }
public class class_name { public static void showBooks(final ListOfBooks booksList){ if(booksList != null && booksList.getBook() != null && !booksList.getBook().isEmpty()){ List<BookType> books = booksList.getBook(); System.out.println("\nNumber of books: " + books.size()); int cnt = 0; for (BookType book : books) { System.out.println("\nBookNum: " + (cnt++ + 1)); List<PersonType> authors = book.getAuthor(); if(authors != null && !authors.isEmpty()){ for (PersonType author : authors) { System.out.println("Author: " + author.getFirstName() + " " + author.getLastName()); } } System.out.println("Title: " + book.getTitle()); System.out.println("Year: " + book.getYearPublished()); if(book.getISBN()!=null){ System.out.println("ISBN: " + book.getISBN()); } } }else{ System.out.println("List of books is empty"); } System.out.println(""); } }
public class class_name { public static void showBooks(final ListOfBooks booksList){ if(booksList != null && booksList.getBook() != null && !booksList.getBook().isEmpty()){ List<BookType> books = booksList.getBook(); System.out.println("\nNumber of books: " + books.size()); // depends on control dependency: [if], data = [none] int cnt = 0; for (BookType book : books) { System.out.println("\nBookNum: " + (cnt++ + 1)); // depends on control dependency: [for], data = [none] List<PersonType> authors = book.getAuthor(); if(authors != null && !authors.isEmpty()){ for (PersonType author : authors) { System.out.println("Author: " + author.getFirstName() + " " + author.getLastName()); // depends on control dependency: [for], data = [none] } } System.out.println("Title: " + book.getTitle()); // depends on control dependency: [for], data = [book] System.out.println("Year: " + book.getYearPublished()); // depends on control dependency: [for], data = [book] if(book.getISBN()!=null){ System.out.println("ISBN: " + book.getISBN()); // depends on control dependency: [if], data = [none] } } }else{ System.out.println("List of books is empty"); // depends on control dependency: [if], data = [none] } System.out.println(""); } }
public class class_name { private void initShowAnimation(TypedArray attrs) { int index = R.styleable.ActionButton_show_animation; if (attrs.hasValue(index)) { int animResId = attrs.getResourceId(index, Animations.NONE.animResId); showAnimation = Animations.load(getContext(), animResId); LOGGER.trace("Initialized Action Button show animation"); } } }
public class class_name { private void initShowAnimation(TypedArray attrs) { int index = R.styleable.ActionButton_show_animation; if (attrs.hasValue(index)) { int animResId = attrs.getResourceId(index, Animations.NONE.animResId); showAnimation = Animations.load(getContext(), animResId); // depends on control dependency: [if], data = [none] LOGGER.trace("Initialized Action Button show animation"); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void createSystemConnection() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "createSystemConnection"); Subject subject = _authorisationUtils.getSIBServerSubject(); try { _connectionToMP = (MPCoreConnection) createConnection(subject, true, null); } catch (SIResourceException e) { // FFDC FFDCFilter .processException( e, "com.ibm.ws.sib.processor.impl.MessageProcessor.createSystemConnection", "1:2529:1.445", this); // Won't ever be thrown if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "createSystemConnection", "SIErrorException " + e); throw new SIErrorException(e); } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "createSystemConnection"); } }
public class class_name { private void createSystemConnection() { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry(tc, "createSystemConnection"); Subject subject = _authorisationUtils.getSIBServerSubject(); try { _connectionToMP = (MPCoreConnection) createConnection(subject, true, null); // depends on control dependency: [try], data = [none] } catch (SIResourceException e) { // FFDC FFDCFilter .processException( e, "com.ibm.ws.sib.processor.impl.MessageProcessor.createSystemConnection", "1:2529:1.445", this); // Won't ever be thrown if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "createSystemConnection", "SIErrorException " + e); throw new SIErrorException(e); } // depends on control dependency: [catch], data = [none] if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "createSystemConnection"); } }
public class class_name { private static Map<byte[], byte[]> createCfRenameMap(Configuration conf) { Map<byte[], byte[]> cfRenameMap = null; String allMappingsPropVal = conf.get(CF_RENAME_PROP); if(allMappingsPropVal != null) { // The conf value format should be sourceCf1:destCf1,sourceCf2:destCf2,... String[] allMappings = allMappingsPropVal.split(","); for (String mapping: allMappings) { if(cfRenameMap == null) { cfRenameMap = new TreeMap<byte[],byte[]>(Bytes.BYTES_COMPARATOR); } String [] srcAndDest = mapping.split(":"); if(srcAndDest.length != 2) { continue; } cfRenameMap.put(srcAndDest[0].getBytes(), srcAndDest[1].getBytes()); } } return cfRenameMap; } }
public class class_name { private static Map<byte[], byte[]> createCfRenameMap(Configuration conf) { Map<byte[], byte[]> cfRenameMap = null; String allMappingsPropVal = conf.get(CF_RENAME_PROP); if(allMappingsPropVal != null) { // The conf value format should be sourceCf1:destCf1,sourceCf2:destCf2,... String[] allMappings = allMappingsPropVal.split(","); for (String mapping: allMappings) { if(cfRenameMap == null) { cfRenameMap = new TreeMap<byte[],byte[]>(Bytes.BYTES_COMPARATOR); // depends on control dependency: [if], data = [none] } String [] srcAndDest = mapping.split(":"); if(srcAndDest.length != 2) { continue; } cfRenameMap.put(srcAndDest[0].getBytes(), srcAndDest[1].getBytes()); // depends on control dependency: [for], data = [none] } } return cfRenameMap; } }
public class class_name { private Comparator<String> getInitedAliasComparator() { if (MapUtil.isEmpty(this.headerAlias)) { return null; } Comparator<String> aliasComparator = this.aliasComparator; if (null == aliasComparator) { Set<String> keySet = this.headerAlias.keySet(); aliasComparator = new IndexedComparator<>(keySet.toArray(new String[keySet.size()])); this.aliasComparator = aliasComparator; } return aliasComparator; } }
public class class_name { private Comparator<String> getInitedAliasComparator() { if (MapUtil.isEmpty(this.headerAlias)) { return null; // depends on control dependency: [if], data = [none] } Comparator<String> aliasComparator = this.aliasComparator; if (null == aliasComparator) { Set<String> keySet = this.headerAlias.keySet(); aliasComparator = new IndexedComparator<>(keySet.toArray(new String[keySet.size()])); // depends on control dependency: [if], data = [none] this.aliasComparator = aliasComparator; // depends on control dependency: [if], data = [none] } return aliasComparator; } }
public class class_name { public static DMatrixRMaj sumRows(DMatrixRMaj input , DMatrixRMaj output ) { if( output == null ) { output = new DMatrixRMaj(input.numRows,1); } else { output.reshape(input.numRows,1); } for( int row = 0; row < input.numRows; row++ ) { double total = 0; int end = (row+1)*input.numCols; for( int index = row*input.numCols; index < end; index++ ) { total += input.data[index]; } output.set(row,total); } return output; } }
public class class_name { public static DMatrixRMaj sumRows(DMatrixRMaj input , DMatrixRMaj output ) { if( output == null ) { output = new DMatrixRMaj(input.numRows,1); // depends on control dependency: [if], data = [none] } else { output.reshape(input.numRows,1); // depends on control dependency: [if], data = [none] } for( int row = 0; row < input.numRows; row++ ) { double total = 0; int end = (row+1)*input.numCols; for( int index = row*input.numCols; index < end; index++ ) { total += input.data[index]; // depends on control dependency: [for], data = [index] } output.set(row,total); // depends on control dependency: [for], data = [row] } return output; } }
public class class_name { @Override public void paint(final RenderContext renderContext) { if (!(renderContext instanceof WebXmlRenderContext)) { throw new SystemException("Unable to render to " + renderContext); } PrintWriter writer = ((WebXmlRenderContext) renderContext).getWriter(); Template template = null; try { template = VelocityEngineFactory.getVelocityEngine().getTemplate(templateUrl); } catch (Exception ex) { String message = "Could not open velocity template \"" + templateUrl + "\" for \"" + this. getClass().getName() + "\""; LOG.error(message, ex); writer.println(message); return; } try { VelocityContext context = new VelocityContext(); fillContext(context); template.merge(context, writer); } catch (ResourceNotFoundException rnfe) { LOG.error("Could not find template " + templateUrl, rnfe); } catch (ParseErrorException pee) { // syntax error : problem parsing the template LOG.error("Parse problems", pee); } catch (MethodInvocationException mie) { // something invoked in the template // threw an exception Throwable wrapped = mie.getWrappedThrowable(); LOG.error("Problems with velocity", mie); if (wrapped != null) { LOG.error("Wrapped exception...", wrapped); } } catch (Exception e) { LOG.error("Problems with velocity", e); } } }
public class class_name { @Override public void paint(final RenderContext renderContext) { if (!(renderContext instanceof WebXmlRenderContext)) { throw new SystemException("Unable to render to " + renderContext); } PrintWriter writer = ((WebXmlRenderContext) renderContext).getWriter(); Template template = null; try { template = VelocityEngineFactory.getVelocityEngine().getTemplate(templateUrl); // depends on control dependency: [try], data = [none] } catch (Exception ex) { String message = "Could not open velocity template \"" + templateUrl + "\" for \"" + this. getClass().getName() + "\""; LOG.error(message, ex); writer.println(message); return; } // depends on control dependency: [catch], data = [none] try { VelocityContext context = new VelocityContext(); fillContext(context); // depends on control dependency: [try], data = [none] template.merge(context, writer); // depends on control dependency: [try], data = [none] } catch (ResourceNotFoundException rnfe) { LOG.error("Could not find template " + templateUrl, rnfe); } catch (ParseErrorException pee) { // depends on control dependency: [catch], data = [none] // syntax error : problem parsing the template LOG.error("Parse problems", pee); } catch (MethodInvocationException mie) { // depends on control dependency: [catch], data = [none] // something invoked in the template // threw an exception Throwable wrapped = mie.getWrappedThrowable(); LOG.error("Problems with velocity", mie); if (wrapped != null) { LOG.error("Wrapped exception...", wrapped); // depends on control dependency: [if], data = [none] } } catch (Exception e) { // depends on control dependency: [catch], data = [none] LOG.error("Problems with velocity", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static int countWildcardsOccurrences(String templateMessage, String occurrence) { if (templateMessage != null && occurrence != null) { final Pattern pattern = Pattern.compile(occurrence); final Matcher matcher = pattern.matcher(templateMessage); int count = 0; while (matcher.find()) { count++; } return count; } else { return 0; } } }
public class class_name { private static int countWildcardsOccurrences(String templateMessage, String occurrence) { if (templateMessage != null && occurrence != null) { final Pattern pattern = Pattern.compile(occurrence); final Matcher matcher = pattern.matcher(templateMessage); int count = 0; while (matcher.find()) { count++; // depends on control dependency: [while], data = [none] } return count; // depends on control dependency: [if], data = [none] } else { return 0; // depends on control dependency: [if], data = [none] } } }
public class class_name { public static boolean isClass(String name) { if(name == null) { return false; } Matcher matcher = CLASS_NAME_PATTERN.matcher(name); return matcher.find(); } }
public class class_name { public static boolean isClass(String name) { if(name == null) { return false; // depends on control dependency: [if], data = [none] } Matcher matcher = CLASS_NAME_PATTERN.matcher(name); return matcher.find(); } }
public class class_name { private void uploadThumbImage(StorageNode client, InputStream inputStream, String masterFilename, FastImageFile fastImageFile) { ByteArrayInputStream thumbImageStream = null; ThumbImage thumbImage = fastImageFile.getThumbImage(); try { //生成缩略图片 thumbImageStream = generateThumbImageStream(inputStream, thumbImage); // 获取文件大小 long fileSize = thumbImageStream.available(); // 获取配置缩略图前缀 String prefixName = thumbImage.getPrefixName(); LOGGER.error("获取到缩略图前缀{}", prefixName); StorageUploadSlaveFileCommand command = new StorageUploadSlaveFileCommand(thumbImageStream, fileSize, masterFilename, prefixName, fastImageFile.getFileExtName()); connectionManager.executeFdfsCmd(client.getInetSocketAddress(), command); } catch (IOException e) { LOGGER.error("upload ThumbImage error", e); throw new FdfsUploadImageException("upload ThumbImage error", e.getCause()); } finally { IOUtils.closeQuietly(thumbImageStream); } } }
public class class_name { private void uploadThumbImage(StorageNode client, InputStream inputStream, String masterFilename, FastImageFile fastImageFile) { ByteArrayInputStream thumbImageStream = null; ThumbImage thumbImage = fastImageFile.getThumbImage(); try { //生成缩略图片 thumbImageStream = generateThumbImageStream(inputStream, thumbImage); // depends on control dependency: [try], data = [none] // 获取文件大小 long fileSize = thumbImageStream.available(); // 获取配置缩略图前缀 String prefixName = thumbImage.getPrefixName(); LOGGER.error("获取到缩略图前缀{}", prefixName); // depends on control dependency: [try], data = [none] StorageUploadSlaveFileCommand command = new StorageUploadSlaveFileCommand(thumbImageStream, fileSize, masterFilename, prefixName, fastImageFile.getFileExtName()); connectionManager.executeFdfsCmd(client.getInetSocketAddress(), command); // depends on control dependency: [try], data = [none] } catch (IOException e) { LOGGER.error("upload ThumbImage error", e); throw new FdfsUploadImageException("upload ThumbImage error", e.getCause()); } finally { // depends on control dependency: [catch], data = [none] IOUtils.closeQuietly(thumbImageStream); } } }
public class class_name { public int count() { Integer result = null; if (result == null) { SimpleQuery q = this.isKeysOnly()? this : this.clone().keysOnly(); result = getDatastoreService().prepare(q.getQuery()).countEntities(fetchOptions); } return result; } }
public class class_name { public int count() { Integer result = null; if (result == null) { SimpleQuery q = this.isKeysOnly()? this : this.clone().keysOnly(); result = getDatastoreService().prepare(q.getQuery()).countEntities(fetchOptions); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public synchronized JavaClassModel create(String qualifiedName) { // if a phantom exists, just convert it PhantomJavaClassModel phantom = new GraphService<>(getGraphContext(), PhantomJavaClassModel.class).getUniqueByProperty( JavaClassModel.QUALIFIED_NAME, qualifiedName); if (phantom != null) { GraphService.removeTypeFromModel(getGraphContext(), phantom, PhantomJavaClassModel.class); return phantom; } JavaClassModel javaClassModel = super.create(); setPropertiesFromName(javaClassModel, qualifiedName); return javaClassModel; } }
public class class_name { public synchronized JavaClassModel create(String qualifiedName) { // if a phantom exists, just convert it PhantomJavaClassModel phantom = new GraphService<>(getGraphContext(), PhantomJavaClassModel.class).getUniqueByProperty( JavaClassModel.QUALIFIED_NAME, qualifiedName); if (phantom != null) { GraphService.removeTypeFromModel(getGraphContext(), phantom, PhantomJavaClassModel.class); // depends on control dependency: [if], data = [none] return phantom; // depends on control dependency: [if], data = [none] } JavaClassModel javaClassModel = super.create(); setPropertiesFromName(javaClassModel, qualifiedName); return javaClassModel; } }