code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { @Override public EClass getIfcSensor() { if (ifcSensorEClass == null) { ifcSensorEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers() .get(591); } return ifcSensorEClass; } }
public class class_name { @Override public EClass getIfcSensor() { if (ifcSensorEClass == null) { ifcSensorEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers() .get(591); // depends on control dependency: [if], data = [none] } return ifcSensorEClass; } }
public class class_name { public synchronized boolean put(byte value) { if (available == capacity) { return false; } buffer[idxPut] = value; idxPut = (idxPut + 1) % capacity; available++; return true; } }
public class class_name { public synchronized boolean put(byte value) { if (available == capacity) { return false; // depends on control dependency: [if], data = [none] } buffer[idxPut] = value; idxPut = (idxPut + 1) % capacity; available++; return true; } }
public class class_name { public <T> SearchResult<T> querySearchResult(String query, Class<T> classOfT) { InputStream instream = null; try { Reader reader = new InputStreamReader(instream = queryForStream(query), "UTF-8"); JsonObject json = new JsonParser().parse(reader).getAsJsonObject(); SearchResult<T> sr = new SearchResult<T>(); sr.setTotalRows(getAsLong(json, "total_rows")); sr.setBookmark(getAsString(json, "bookmark")); if (json.has("rows")) { sr.setRows(getRows(json.getAsJsonArray("rows"), sr, classOfT)); } else if (json.has("groups")) { setGroups(json.getAsJsonArray("groups"), sr, classOfT); } if (json.has("counts")) { sr.setCounts(getFieldsCounts(json.getAsJsonObject("counts").entrySet())); } if (json.has("ranges")) { sr.setRanges(getFieldsCounts(json.getAsJsonObject("ranges").entrySet())); } return sr; } catch (UnsupportedEncodingException e) { // This should never happen as every implementation of the java platform is required // to support UTF-8. throw new RuntimeException(e); } finally { close(instream); } } }
public class class_name { public <T> SearchResult<T> querySearchResult(String query, Class<T> classOfT) { InputStream instream = null; try { Reader reader = new InputStreamReader(instream = queryForStream(query), "UTF-8"); JsonObject json = new JsonParser().parse(reader).getAsJsonObject(); SearchResult<T> sr = new SearchResult<T>(); sr.setTotalRows(getAsLong(json, "total_rows")); // depends on control dependency: [try], data = [none] sr.setBookmark(getAsString(json, "bookmark")); // depends on control dependency: [try], data = [none] if (json.has("rows")) { sr.setRows(getRows(json.getAsJsonArray("rows"), sr, classOfT)); // depends on control dependency: [if], data = [none] } else if (json.has("groups")) { setGroups(json.getAsJsonArray("groups"), sr, classOfT); // depends on control dependency: [if], data = [none] } if (json.has("counts")) { sr.setCounts(getFieldsCounts(json.getAsJsonObject("counts").entrySet())); // depends on control dependency: [if], data = [none] } if (json.has("ranges")) { sr.setRanges(getFieldsCounts(json.getAsJsonObject("ranges").entrySet())); // depends on control dependency: [if], data = [none] } return sr; // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException e) { // This should never happen as every implementation of the java platform is required // to support UTF-8. throw new RuntimeException(e); } finally { // depends on control dependency: [catch], data = [none] close(instream); } } }
public class class_name { public String getText(Chronology chrono, TemporalField field, long value, TextStyle style, Locale locale) { if (chrono == IsoChronology.INSTANCE || !(field instanceof ChronoField)) { return getText(field, value, style, locale); } int fieldIndex; int fieldValue; if (field == ERA) { fieldIndex = Calendar.ERA; /* J2ObjC removed: Only "gregorian" and "julian" calendars are supported. if (chrono == JapaneseChronology.INSTANCE) { if (value == -999) { fieldValue = 0; } else { fieldValue = (int) value + 2; } } else { fieldValue = (int) value; } */ fieldValue = (int) value; } else if (field == MONTH_OF_YEAR) { fieldIndex = Calendar.MONTH; fieldValue = (int) value - 1; } else if (field == DAY_OF_WEEK) { fieldIndex = Calendar.DAY_OF_WEEK; fieldValue = (int) value + 1; if (fieldValue > 7) { fieldValue = Calendar.SUNDAY; } } else if (field == AMPM_OF_DAY) { fieldIndex = Calendar.AM_PM; fieldValue = (int) value; } else { return null; } return CalendarDataUtility.retrieveJavaTimeFieldValueName( chrono.getCalendarType(), fieldIndex, fieldValue, style.toCalendarStyle(), locale); } }
public class class_name { public String getText(Chronology chrono, TemporalField field, long value, TextStyle style, Locale locale) { if (chrono == IsoChronology.INSTANCE || !(field instanceof ChronoField)) { return getText(field, value, style, locale); // depends on control dependency: [if], data = [none] } int fieldIndex; int fieldValue; if (field == ERA) { fieldIndex = Calendar.ERA; // depends on control dependency: [if], data = [none] /* J2ObjC removed: Only "gregorian" and "julian" calendars are supported. if (chrono == JapaneseChronology.INSTANCE) { if (value == -999) { fieldValue = 0; } else { fieldValue = (int) value + 2; } } else { fieldValue = (int) value; } */ fieldValue = (int) value; // depends on control dependency: [if], data = [none] } else if (field == MONTH_OF_YEAR) { fieldIndex = Calendar.MONTH; // depends on control dependency: [if], data = [none] fieldValue = (int) value - 1; // depends on control dependency: [if], data = [none] } else if (field == DAY_OF_WEEK) { fieldIndex = Calendar.DAY_OF_WEEK; // depends on control dependency: [if], data = [none] fieldValue = (int) value + 1; // depends on control dependency: [if], data = [none] if (fieldValue > 7) { fieldValue = Calendar.SUNDAY; // depends on control dependency: [if], data = [none] } } else if (field == AMPM_OF_DAY) { fieldIndex = Calendar.AM_PM; // depends on control dependency: [if], data = [none] fieldValue = (int) value; // depends on control dependency: [if], data = [none] } else { return null; // depends on control dependency: [if], data = [none] } return CalendarDataUtility.retrieveJavaTimeFieldValueName( chrono.getCalendarType(), fieldIndex, fieldValue, style.toCalendarStyle(), locale); } }
public class class_name { @Override public int doEndTag() throws JspException { try { String imgSrc = getImgSrcToRender(); if (null == var) { try { pageContext.getOut().print(imgSrc); } catch (IOException e) { throw new JspException(e); } } else { pageContext.setAttribute(var, imgSrc); } return super.doEndTag(); } finally { // Reset the Thread local for the Jawr context ThreadLocalJawrContext.reset(); } } }
public class class_name { @Override public int doEndTag() throws JspException { try { String imgSrc = getImgSrcToRender(); if (null == var) { try { pageContext.getOut().print(imgSrc); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new JspException(e); } // depends on control dependency: [catch], data = [none] } else { pageContext.setAttribute(var, imgSrc); } return super.doEndTag(); } finally { // Reset the Thread local for the Jawr context ThreadLocalJawrContext.reset(); } } }
public class class_name { private static String getFileExtension(String path) { int index = path.lastIndexOf('.'); if (index == -1) { return null; } return path.substring(index + 1).toLowerCase(Locale.ENGLISH); } }
public class class_name { private static String getFileExtension(String path) { int index = path.lastIndexOf('.'); if (index == -1) { return null; // depends on control dependency: [if], data = [none] } return path.substring(index + 1).toLowerCase(Locale.ENGLISH); } }
public class class_name { protected static void registerForPushNotification(Context context) { if (checkPlayService(context)) { WonderPushFirebaseMessagingService.fetchInstanceId(); } else { Log.w(TAG, "Google Play Services not present. Check your setup. If on an emulator, use a Google APIs system image."); } } }
public class class_name { protected static void registerForPushNotification(Context context) { if (checkPlayService(context)) { WonderPushFirebaseMessagingService.fetchInstanceId(); // depends on control dependency: [if], data = [none] } else { Log.w(TAG, "Google Play Services not present. Check your setup. If on an emulator, use a Google APIs system image."); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public Result<MZXMLIndexElement> buildIndex(final IndexBuilder.Info info) throws Exception { Result<MZXMLIndexElement> result = new IndexBuilder.Result<>(info); int numOpeningScanTagsFound = 0; vars.reset(); XMLStreamReaderImpl reader = (pool == null) ? new XMLStreamReaderImpl() : pool.borrowObject(); try { reader.setInput(info.is, StandardCharsets.UTF_8.name()); LogHelper.setJavolutionLogLevelFatal(); final XMLStreamReaderImpl.LocationImpl location = reader.getLocation(); int eventType = XMLStreamConstants.END_DOCUMENT; CharArray localName, attr; Attributes attrs; do { // Read the next XML element try { eventType = reader.next(); } catch (XMLStreamException e) { if (e instanceof XMLUnexpectedEndTagException) { // it's ok to have unexpected closing tags eventType = reader.getEventType(); } else if (e instanceof XMLUnexpectedEndOfDocumentException) { // as we're reading arbitrary chunks of file, we will almost always finish parsing by hitting this condition break; } else { throw new FileParsingException(e); } } // Process the read event switch (eventType) { case XMLStreamConstants.START_ELEMENT: localName = reader.getLocalName(); attrs = reader.getAttributes(); if (localName.contentEquals(MZXMLMultiSpectraParser.TAG.SCAN.name)) { if (vars.offsetLo != null) { // this means we've encountered nested Spectrum tags long lastStartTagPos = location.getLastStartTagPos(); vars.length = (int) (vars.offsetLo - lastStartTagPos); addAndFlush(result, info.offsetInFile); } //tagScanStart(reader); vars.offsetLo = location.getLastStartTagPos(); try { vars.scanNumRaw = attrs.getValue(MZXMLMultiSpectraParser.ATTR.SCAN_NUM.name) .toInt(); } catch (NumberFormatException e) { throw new FileParsingException("Malformed scan number while building index", e); } } break; case XMLStreamConstants.CHARACTERS: break; case XMLStreamConstants.END_ELEMENT: localName = reader.getLocalName(); if (localName.contentEquals(MZXMLMultiSpectraParser.TAG.SCAN.name)) { 
vars.offsetHi = location.getTotalCharsRead(); addAndFlush(result, info.offsetInFile); } break; } } while (eventType != XMLStreamConstants.END_DOCUMENT); } finally { addAndFlush(result, info.offsetInFile); // we need to return the reaer to the pool, if we borrowed it from there if (pool != null && reader != null) { pool.returnObject(reader); } } return result; } }
public class class_name { @Override public Result<MZXMLIndexElement> buildIndex(final IndexBuilder.Info info) throws Exception { Result<MZXMLIndexElement> result = new IndexBuilder.Result<>(info); int numOpeningScanTagsFound = 0; vars.reset(); XMLStreamReaderImpl reader = (pool == null) ? new XMLStreamReaderImpl() : pool.borrowObject(); try { reader.setInput(info.is, StandardCharsets.UTF_8.name()); LogHelper.setJavolutionLogLevelFatal(); final XMLStreamReaderImpl.LocationImpl location = reader.getLocation(); int eventType = XMLStreamConstants.END_DOCUMENT; CharArray localName, attr; Attributes attrs; do { // Read the next XML element try { eventType = reader.next(); // depends on control dependency: [try], data = [none] } catch (XMLStreamException e) { if (e instanceof XMLUnexpectedEndTagException) { // it's ok to have unexpected closing tags eventType = reader.getEventType(); // depends on control dependency: [if], data = [none] } else if (e instanceof XMLUnexpectedEndOfDocumentException) { // as we're reading arbitrary chunks of file, we will almost always finish parsing by hitting this condition break; } else { throw new FileParsingException(e); } } // depends on control dependency: [catch], data = [none] // Process the read event switch (eventType) { case XMLStreamConstants.START_ELEMENT: localName = reader.getLocalName(); attrs = reader.getAttributes(); if (localName.contentEquals(MZXMLMultiSpectraParser.TAG.SCAN.name)) { if (vars.offsetLo != null) { // this means we've encountered nested Spectrum tags long lastStartTagPos = location.getLastStartTagPos(); vars.length = (int) (vars.offsetLo - lastStartTagPos); // depends on control dependency: [if], data = [(vars.offsetLo] addAndFlush(result, info.offsetInFile); // depends on control dependency: [if], data = [none] } //tagScanStart(reader); vars.offsetLo = location.getLastStartTagPos(); // depends on control dependency: [if], data = [none] try { vars.scanNumRaw = 
attrs.getValue(MZXMLMultiSpectraParser.ATTR.SCAN_NUM.name) .toInt(); // depends on control dependency: [try], data = [none] } catch (NumberFormatException e) { throw new FileParsingException("Malformed scan number while building index", e); } // depends on control dependency: [catch], data = [none] } break; case XMLStreamConstants.CHARACTERS: break; case XMLStreamConstants.END_ELEMENT: localName = reader.getLocalName(); if (localName.contentEquals(MZXMLMultiSpectraParser.TAG.SCAN.name)) { vars.offsetHi = location.getTotalCharsRead(); // depends on control dependency: [if], data = [none] addAndFlush(result, info.offsetInFile); // depends on control dependency: [if], data = [none] } break; } } while (eventType != XMLStreamConstants.END_DOCUMENT); } finally { addAndFlush(result, info.offsetInFile); // we need to return the reaer to the pool, if we borrowed it from there if (pool != null && reader != null) { pool.returnObject(reader); // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { @Override public void visitEnd() { if (name == null || name.length() == 0) { reporter.error("The 'name' attribute of @Model from " + workbench.getType().getClassName() + " must be " + "set"); return; } // Check the type of the field if (! Type.getDescriptor(Crud.class).equals(node.desc)) { reporter.warn("The type of the field " + field + " from " + workbench.getType().getClassName() + " should" + " be " + Crud.class.getName() + " because the field is annotated with @Model"); } Element requires = new Element("requires", ""); requires.addAttribute(new Attribute("field", field)); requires.addAttribute(new Attribute("filter", getFilter(name))); workbench.getElements().put(requires, null); } }
public class class_name { @Override public void visitEnd() { if (name == null || name.length() == 0) { reporter.error("The 'name' attribute of @Model from " + workbench.getType().getClassName() + " must be " + "set"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // Check the type of the field if (! Type.getDescriptor(Crud.class).equals(node.desc)) { reporter.warn("The type of the field " + field + " from " + workbench.getType().getClassName() + " should" + " be " + Crud.class.getName() + " because the field is annotated with @Model"); // depends on control dependency: [if], data = [none] } Element requires = new Element("requires", ""); requires.addAttribute(new Attribute("field", field)); requires.addAttribute(new Attribute("filter", getFilter(name))); workbench.getElements().put(requires, null); } }
public class class_name { protected Box createTfra(StreamingTrack track) { TrackFragmentRandomAccessBox tfra = new TrackFragmentRandomAccessBox(); tfra.setVersion(1); // use long offsets and times long[] offsets = tfraOffsets.get(track); long[] times = tfraTimes.get(track); List<TrackFragmentRandomAccessBox.Entry> entries = new ArrayList<TrackFragmentRandomAccessBox.Entry>(times.length); for (int i = 0; i < times.length; i++) { entries.add(new TrackFragmentRandomAccessBox.Entry(times[i], offsets[i], 1, 1, 1)); } tfra.setEntries(entries); tfra.setTrackId(track.getTrackExtension(TrackIdTrackExtension.class).getTrackId()); return tfra; } }
public class class_name { protected Box createTfra(StreamingTrack track) { TrackFragmentRandomAccessBox tfra = new TrackFragmentRandomAccessBox(); tfra.setVersion(1); // use long offsets and times long[] offsets = tfraOffsets.get(track); long[] times = tfraTimes.get(track); List<TrackFragmentRandomAccessBox.Entry> entries = new ArrayList<TrackFragmentRandomAccessBox.Entry>(times.length); for (int i = 0; i < times.length; i++) { entries.add(new TrackFragmentRandomAccessBox.Entry(times[i], offsets[i], 1, 1, 1)); // depends on control dependency: [for], data = [i] } tfra.setEntries(entries); tfra.setTrackId(track.getTrackExtension(TrackIdTrackExtension.class).getTrackId()); return tfra; } }
public class class_name { public boolean containsValue(final int value) { boolean found = false; if (value != missingValue) { final int[] entries = this.entries; @DoNotSub final int length = entries.length; for (@DoNotSub int valueIndex = 1; valueIndex < length; valueIndex += 2) { if (value == entries[valueIndex]) { found = true; break; } } } return found; } }
public class class_name { public boolean containsValue(final int value) { boolean found = false; if (value != missingValue) { final int[] entries = this.entries; @DoNotSub final int length = entries.length; for (@DoNotSub int valueIndex = 1; valueIndex < length; valueIndex += 2) { if (value == entries[valueIndex]) { found = true; // depends on control dependency: [if], data = [none] break; } } } return found; } }
public class class_name { static boolean loadDat(String path) { try { ByteArray byteArray = ByteArray.createByteArray(path + Predefine.BIN_EXT); if (byteArray == null) return false; int size = byteArray.nextInt(); CoreDictionary.Attribute[] attributes = new CoreDictionary.Attribute[size]; final Nature[] natureIndexArray = Nature.values(); for (int i = 0; i < size; ++i) { // 第一个是全部频次,第二个是词性个数 int currentTotalFrequency = byteArray.nextInt(); int length = byteArray.nextInt(); attributes[i] = new CoreDictionary.Attribute(length); attributes[i].totalFrequency = currentTotalFrequency; for (int j = 0; j < length; ++j) { attributes[i].nature[j] = natureIndexArray[byteArray.nextInt()]; attributes[i].frequency[j] = byteArray.nextInt(); } } if (!trie.load(byteArray, attributes) || byteArray.hasMore()) return false; } catch (Exception e) { logger.warning("读取失败,问题发生在" + e); return false; } return true; } }
public class class_name { static boolean loadDat(String path) { try { ByteArray byteArray = ByteArray.createByteArray(path + Predefine.BIN_EXT); if (byteArray == null) return false; int size = byteArray.nextInt(); CoreDictionary.Attribute[] attributes = new CoreDictionary.Attribute[size]; final Nature[] natureIndexArray = Nature.values(); for (int i = 0; i < size; ++i) { // 第一个是全部频次,第二个是词性个数 int currentTotalFrequency = byteArray.nextInt(); int length = byteArray.nextInt(); attributes[i] = new CoreDictionary.Attribute(length); // depends on control dependency: [for], data = [i] attributes[i].totalFrequency = currentTotalFrequency; // depends on control dependency: [for], data = [i] for (int j = 0; j < length; ++j) { attributes[i].nature[j] = natureIndexArray[byteArray.nextInt()]; // depends on control dependency: [for], data = [j] attributes[i].frequency[j] = byteArray.nextInt(); // depends on control dependency: [for], data = [j] } } if (!trie.load(byteArray, attributes) || byteArray.hasMore()) return false; } catch (Exception e) { logger.warning("读取失败,问题发生在" + e); return false; } // depends on control dependency: [catch], data = [none] return true; } }
public class class_name { private <I extends Message, O extends Message> ListenableFuture<JsonRpcResponse> invoke( ServerMethod<I, O> method, JsonObject parameter, JsonElement id) { I request; try { request = (I) Messages.fromJson(method.inputBuilder(), parameter); } catch (Exception e) { serverLogger.logServerFailure(method, e); SettableFuture<JsonRpcResponse> future = SettableFuture.create(); future.setException(e); return future; } ListenableFuture<O> response = method.invoke(request); return Futures.transform(response, new JsonConverter(id), TRANSFORM_EXECUTOR); } }
public class class_name { private <I extends Message, O extends Message> ListenableFuture<JsonRpcResponse> invoke( ServerMethod<I, O> method, JsonObject parameter, JsonElement id) { I request; try { request = (I) Messages.fromJson(method.inputBuilder(), parameter); // depends on control dependency: [try], data = [none] } catch (Exception e) { serverLogger.logServerFailure(method, e); SettableFuture<JsonRpcResponse> future = SettableFuture.create(); future.setException(e); return future; } // depends on control dependency: [catch], data = [none] ListenableFuture<O> response = method.invoke(request); return Futures.transform(response, new JsonConverter(id), TRANSFORM_EXECUTOR); } }
public class class_name { public AioSession read(CompletionHandler<Integer, AioSession> handler) { if (isOpen()) { this.readBuffer.clear(); this.channel.read(this.readBuffer, Math.max(this.readTimeout, 0L), TimeUnit.MILLISECONDS, this, handler); } return this; } }
public class class_name { public AioSession read(CompletionHandler<Integer, AioSession> handler) { if (isOpen()) { this.readBuffer.clear(); // depends on control dependency: [if], data = [none] this.channel.read(this.readBuffer, Math.max(this.readTimeout, 0L), TimeUnit.MILLISECONDS, this, handler); // depends on control dependency: [if], data = [none] } return this; } }
public class class_name { private void printDocument(Node node, Hashtable properties) { // if node is empty do nothing... (Recursion) if (node == null) { return; } // initialise local variables int type = node.getNodeType(); String name = node.getNodeName(); // detect node type switch (type) { case Node.DOCUMENT_NODE: printDocument(((Document)node).getDocumentElement(), properties); break; case Node.ELEMENT_NODE: // check if its the <head> node. Nothing inside the <head> node // must be // part of the output, but we must scan the content of this // node to get all // <meta> tags if (name.equals(NODE_HEAD)) { m_write = false; } // scan element node; if a block has to be removed or replaced, // break and discard child nodes transformStartElement(node, properties); // test if node has children NodeList children = node.getChildNodes(); if (children != null) { int len = children.getLength(); for (int i = 0; i < len; i++) { // recursively call printDocument with all child nodes printDocument(children.item(i), properties); } } break; case Node.TEXT_NODE: // replace subStrings in text nodes transformTextNode(node); break; default: break; } // end of recursion, add eventual endtags and suffixes switch (type) { case Node.ELEMENT_NODE: // analyse endtags and add them to output transformEndElement(node); if (node.getNodeName().equals(NODE_HEAD)) { m_write = true; } break; case Node.DOCUMENT_NODE: break; default: break; } } }
public class class_name { private void printDocument(Node node, Hashtable properties) { // if node is empty do nothing... (Recursion) if (node == null) { return; // depends on control dependency: [if], data = [none] } // initialise local variables int type = node.getNodeType(); String name = node.getNodeName(); // detect node type switch (type) { case Node.DOCUMENT_NODE: printDocument(((Document)node).getDocumentElement(), properties); break; case Node.ELEMENT_NODE: // check if its the <head> node. Nothing inside the <head> node // must be // part of the output, but we must scan the content of this // node to get all // <meta> tags if (name.equals(NODE_HEAD)) { m_write = false; // depends on control dependency: [if], data = [none] } // scan element node; if a block has to be removed or replaced, // break and discard child nodes transformStartElement(node, properties); // test if node has children NodeList children = node.getChildNodes(); if (children != null) { int len = children.getLength(); for (int i = 0; i < len; i++) { // recursively call printDocument with all child nodes printDocument(children.item(i), properties); // depends on control dependency: [for], data = [i] } } break; case Node.TEXT_NODE: // replace subStrings in text nodes transformTextNode(node); break; default: break; } // end of recursion, add eventual endtags and suffixes switch (type) { case Node.ELEMENT_NODE: // analyse endtags and add them to output transformEndElement(node); if (node.getNodeName().equals(NODE_HEAD)) { m_write = true; // depends on control dependency: [if], data = [none] } break; case Node.DOCUMENT_NODE: break; default: break; } } }
public class class_name { protected void setUser(String user) { for (INodeHardLinkFile linkedFile : linkedFiles) { linkedFile.setUser(user, false); } } }
public class class_name { protected void setUser(String user) { for (INodeHardLinkFile linkedFile : linkedFiles) { linkedFile.setUser(user, false); // depends on control dependency: [for], data = [linkedFile] } } }
public class class_name { @Override public <E> List<E> findAll(Class<E> entityClass, String[] columnsToSelect, Object... keys) { List<E> results = new ArrayList<E>(); if (columnsToSelect == null) { columnsToSelect = new String[0]; } if (keys == null) { EntityMetadata entityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClass); MetamodelImpl metamodel = (MetamodelImpl) KunderaMetadataManager.getMetamodel(kunderaMetadata, entityMetadata.getPersistenceUnit()); EntityType entityType = metamodel.entity(entityMetadata.getEntityClazz()); Table schemaTable = tableAPI.getTable(entityMetadata.getTableName()); // KunderaCoreUtils.showQuery("Get all records for " + // entityMetadata.getTableName(), showQuery); Iterator<Row> rowsIter = tableAPI.tableIterator(schemaTable.createPrimaryKey(), null, null); Map<String, Object> relationMap = initialize(entityMetadata); try { results = scrollAndPopulate(null, entityMetadata, metamodel, schemaTable, rowsIter, relationMap, Arrays.asList(columnsToSelect)); } catch (Exception e) { log.error("Error while finding records , Caused By :" + e + "."); throw new PersistenceException(e); } } else { for (Object key : keys) { results.add((E) find(entityClass, key, Arrays.asList(columnsToSelect))); } } return results; } }
public class class_name { @Override public <E> List<E> findAll(Class<E> entityClass, String[] columnsToSelect, Object... keys) { List<E> results = new ArrayList<E>(); if (columnsToSelect == null) { columnsToSelect = new String[0]; // depends on control dependency: [if], data = [none] } if (keys == null) { EntityMetadata entityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClass); MetamodelImpl metamodel = (MetamodelImpl) KunderaMetadataManager.getMetamodel(kunderaMetadata, entityMetadata.getPersistenceUnit()); EntityType entityType = metamodel.entity(entityMetadata.getEntityClazz()); Table schemaTable = tableAPI.getTable(entityMetadata.getTableName()); // KunderaCoreUtils.showQuery("Get all records for " + // entityMetadata.getTableName(), showQuery); Iterator<Row> rowsIter = tableAPI.tableIterator(schemaTable.createPrimaryKey(), null, null); Map<String, Object> relationMap = initialize(entityMetadata); try { results = scrollAndPopulate(null, entityMetadata, metamodel, schemaTable, rowsIter, relationMap, Arrays.asList(columnsToSelect)); // depends on control dependency: [try], data = [none] } catch (Exception e) { log.error("Error while finding records , Caused By :" + e + "."); throw new PersistenceException(e); } // depends on control dependency: [catch], data = [none] } else { for (Object key : keys) { results.add((E) find(entityClass, key, Arrays.asList(columnsToSelect))); // depends on control dependency: [for], data = [key] } } return results; } }
public class class_name { public SimpleExpression<byte[]> asBinary() { if (binary == null) { binary = Expressions.operation(byte[].class, SpatialOps.AS_BINARY, mixin); } return binary; } }
public class class_name { public SimpleExpression<byte[]> asBinary() { if (binary == null) { binary = Expressions.operation(byte[].class, SpatialOps.AS_BINARY, mixin); // depends on control dependency: [if], data = [none] } return binary; } }
public class class_name { public void addConverterFactory(final IStringConverterFactory converterFactory) { addConverterInstanceFactory(new IStringConverterInstanceFactory() { @SuppressWarnings("unchecked") @Override public IStringConverter<?> getConverterInstance(Parameter parameter, Class<?> forType, String optionName) { final Class<? extends IStringConverter<?>> converterClass = converterFactory.getConverter(forType); try { if(optionName == null) { optionName = parameter.names().length > 0 ? parameter.names()[0] : "[Main class]"; } return converterClass != null ? instantiateConverter(optionName, converterClass) : null; } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new ParameterException(e); } } }); } }
public class class_name { public void addConverterFactory(final IStringConverterFactory converterFactory) { addConverterInstanceFactory(new IStringConverterInstanceFactory() { @SuppressWarnings("unchecked") @Override public IStringConverter<?> getConverterInstance(Parameter parameter, Class<?> forType, String optionName) { final Class<? extends IStringConverter<?>> converterClass = converterFactory.getConverter(forType); try { if(optionName == null) { optionName = parameter.names().length > 0 ? parameter.names()[0] : "[Main class]"; // depends on control dependency: [if], data = [none] } return converterClass != null ? instantiateConverter(optionName, converterClass) : null; // depends on control dependency: [try], data = [none] } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) { throw new ParameterException(e); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { public int getNextPageWithoutDeletingIt(int prevPage) { reportFreePage(prevPage); if (iter.hasNextULL()) { LongLongIndex.LLEntry e = iter.nextULL(); long pageId = e.getKey(); long value = e.getValue(); // do not return pages that are PID_DO_NOT_USE (i.e. negative value). while ((value > maxFreeTxId || value < 0) && iter.hasNextULL()) { e = iter.nextULL(); pageId = e.getKey(); value = e.getValue(); } if (value >= 0 && value <= maxFreeTxId) { //label the page as invalid //TODO or implement iter.updateValue() ?! idx.insertLong(pageId, -currentTxId); iter.close(); iter = idx.iterator(pageId+1, Long.MAX_VALUE); //it should be sufficient to set this only when the new page is taken //from the index i.o. the Atomic counter... hasWritingSettled = false; return (int) pageId; } } //If we didn't find any we allocate a new page. return lastPage.addAndGet(1); } }
public class class_name { public int getNextPageWithoutDeletingIt(int prevPage) { reportFreePage(prevPage); if (iter.hasNextULL()) { LongLongIndex.LLEntry e = iter.nextULL(); long pageId = e.getKey(); long value = e.getValue(); // do not return pages that are PID_DO_NOT_USE (i.e. negative value). while ((value > maxFreeTxId || value < 0) && iter.hasNextULL()) { e = iter.nextULL(); // depends on control dependency: [while], data = [none] pageId = e.getKey(); // depends on control dependency: [while], data = [none] value = e.getValue(); // depends on control dependency: [while], data = [none] } if (value >= 0 && value <= maxFreeTxId) { //label the page as invalid //TODO or implement iter.updateValue() ?! idx.insertLong(pageId, -currentTxId); // depends on control dependency: [if], data = [none] iter.close(); // depends on control dependency: [if], data = [none] iter = idx.iterator(pageId+1, Long.MAX_VALUE); // depends on control dependency: [if], data = [none] //it should be sufficient to set this only when the new page is taken //from the index i.o. the Atomic counter... hasWritingSettled = false; // depends on control dependency: [if], data = [none] return (int) pageId; // depends on control dependency: [if], data = [none] } } //If we didn't find any we allocate a new page. return lastPage.addAndGet(1); } }
public class class_name {
    /**
     * Finds a method on {@code clazz} by name — optionally ignoring case — whose declared
     * parameter types can all accept the supplied {@code paramTypes}.
     *
     * @param clazz      class to search; {@code null} yields {@code null}
     * @param ignoreCase whether the method-name comparison ignores case
     * @param methodName target method name; blank yields {@code null}
     * @param paramTypes argument types the candidate must be able to accept
     * @return the first matching method, or {@code null} if none matches
     * @throws SecurityException if reflective access is denied
     */
    public static Method getMethod(Class<?> clazz, boolean ignoreCase, String methodName, Class<?>... paramTypes) throws SecurityException {
        if (null == clazz || StrUtil.isBlank(methodName)) {
            return null;
        }
        final Method[] candidates = getMethods(clazz);
        if (ArrayUtil.isNotEmpty(candidates)) {
            for (final Method candidate : candidates) {
                // Skip methods whose name does not match under the requested case rule.
                if (!StrUtil.equals(methodName, candidate.getName(), ignoreCase)) {
                    continue;
                }
                // First candidate whose whole parameter list is assignable wins.
                if (ClassUtil.isAllAssignableFrom(candidate.getParameterTypes(), paramTypes)) {
                    return candidate;
                }
            }
        }
        return null;
    }
}
// Control-dependency-annotated duplicate of the reflective getMethod lookup above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public static Method getMethod(Class<?> clazz, boolean ignoreCase, String methodName, Class<?>... paramTypes) throws SecurityException { if (null == clazz || StrUtil.isBlank(methodName)) { return null; // depends on control dependency: [if], data = [none] } final Method[] methods = getMethods(clazz); if (ArrayUtil.isNotEmpty(methods)) { for (Method method : methods) { if (StrUtil.equals(methodName, method.getName(), ignoreCase)) { if (ClassUtil.isAllAssignableFrom(method.getParameterTypes(), paramTypes)) { return method; // depends on control dependency: [if], data = [none] } } } } return null; } }
public class class_name {
    /**
     * Computes the slope from {@code node} toward the neighbour indicated by the flow
     * direction, as (elevation drop) / (distance to that neighbour).
     *
     * @param node      the grid cell to compute the slope for
     * @param flowValue flow-direction code; novalue or 10 (outlet) yield novalue
     * @return the slope, or {@code doubleNovalue} when it cannot be computed
     */
    public static double calculateSlope( GridNode node, double flowValue ) {
        if (isNovalue(flowValue)) {
            return doubleNovalue;
        }
        final int flowDir = (int) flowValue;
        if (flowDir == 10) {
            // Outlet cell: there is no downstream neighbour to measure against.
            return doubleNovalue;
        }
        final Direction direction = Direction.forFlow(flowDir);
        final double distance = direction.getDistance(node.xRes, node.yRes);
        final double drop = node.elevation - node.getElevationAt(direction);
        return drop / distance;
    }
}
// Control-dependency-annotated duplicate of the calculateSlope snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public static double calculateSlope( GridNode node, double flowValue ) { double value = doubleNovalue; if (!isNovalue(flowValue)) { int flowDir = (int) flowValue; if (flowDir != 10) { Direction direction = Direction.forFlow(flowDir); double distance = direction.getDistance(node.xRes, node.yRes); double currentElevation = node.elevation; double nextElevation = node.getElevationAt(direction); value = (currentElevation - nextElevation) / distance; // depends on control dependency: [if], data = [none] } } return value; } }
public class class_name { public static OrderBook adaptOrderBook( final RippleOrderBook rippleOrderBook, final RippleMarketDataParams params, final CurrencyPair currencyPair) { final String orderBook = rippleOrderBook.getOrderBook(); // e.g. XRP/BTC+rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B final String[] splitPair = orderBook.split("/"); final String[] baseSplit = splitPair[0].split("\\+"); final String baseSymbol = baseSplit[0]; if (baseSymbol.equals(currencyPair.base.getCurrencyCode()) == false) { throw new IllegalStateException( String.format( "base symbol in Ripple order book %s does not match requested base %s", orderBook, currencyPair)); } final String baseCounterparty; if (baseSymbol.equals("XRP")) { baseCounterparty = ""; // native currency } else { baseCounterparty = baseSplit[1]; } if (baseCounterparty.equals(params.getBaseCounterparty()) == false) { throw new IllegalStateException( String.format( "base counterparty in Ripple order book %s does not match requested counterparty %s", orderBook, params.getBaseCounterparty())); } final String[] counterSplit = splitPair[1].split("\\+"); final String counterSymbol = counterSplit[0]; if (counterSymbol.equals(currencyPair.counter.getCurrencyCode()) == false) { throw new IllegalStateException( String.format( "counter symbol in Ripple order book %s does not match requested base %s", orderBook, currencyPair)); } final String counterCounterparty; if (counterSymbol.equals("XRP")) { counterCounterparty = ""; // native currency } else { counterCounterparty = counterSplit[1]; } if (counterCounterparty.equals(params.getCounterCounterparty()) == false) { throw new IllegalStateException( String.format( "counter counterparty in Ripple order book %s does not match requested counterparty %s", orderBook, params.getCounterCounterparty())); } final List<LimitOrder> bids = createOrders( currencyPair, OrderType.BID, rippleOrderBook.getBids(), baseCounterparty, counterCounterparty); final List<LimitOrder> asks = createOrders( currencyPair, 
OrderType.ASK, rippleOrderBook.getAsks(), baseCounterparty, counterCounterparty); return new OrderBook(null, asks, bids); } }
// Control-dependency-annotated duplicate of the adaptOrderBook snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public static OrderBook adaptOrderBook( final RippleOrderBook rippleOrderBook, final RippleMarketDataParams params, final CurrencyPair currencyPair) { final String orderBook = rippleOrderBook.getOrderBook(); // e.g. XRP/BTC+rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B final String[] splitPair = orderBook.split("/"); final String[] baseSplit = splitPair[0].split("\\+"); final String baseSymbol = baseSplit[0]; if (baseSymbol.equals(currencyPair.base.getCurrencyCode()) == false) { throw new IllegalStateException( String.format( "base symbol in Ripple order book %s does not match requested base %s", orderBook, currencyPair)); } final String baseCounterparty; if (baseSymbol.equals("XRP")) { baseCounterparty = ""; // native currency // depends on control dependency: [if], data = [none] } else { baseCounterparty = baseSplit[1]; // depends on control dependency: [if], data = [none] } if (baseCounterparty.equals(params.getBaseCounterparty()) == false) { throw new IllegalStateException( String.format( "base counterparty in Ripple order book %s does not match requested counterparty %s", orderBook, params.getBaseCounterparty())); } final String[] counterSplit = splitPair[1].split("\\+"); final String counterSymbol = counterSplit[0]; if (counterSymbol.equals(currencyPair.counter.getCurrencyCode()) == false) { throw new IllegalStateException( String.format( "counter symbol in Ripple order book %s does not match requested base %s", orderBook, currencyPair)); } final String counterCounterparty; if (counterSymbol.equals("XRP")) { counterCounterparty = ""; // native currency // depends on control dependency: [if], data = [none] } else { counterCounterparty = counterSplit[1]; // depends on control dependency: [if], data = [none] } if (counterCounterparty.equals(params.getCounterCounterparty()) == false) { throw new IllegalStateException( String.format( "counter counterparty in Ripple order book %s does not match requested counterparty %s", orderBook, 
params.getCounterCounterparty())); } final List<LimitOrder> bids = createOrders( currencyPair, OrderType.BID, rippleOrderBook.getBids(), baseCounterparty, counterCounterparty); final List<LimitOrder> asks = createOrders( currencyPair, OrderType.ASK, rippleOrderBook.getAsks(), baseCounterparty, counterCounterparty); return new OrderBook(null, asks, bids); } }
public class class_name {
    /**
     * Walks backwards over the match chain (via {@code prev} links) and returns the
     * earliest element whose pattern label equals {@code label}, or {@code null} if
     * no element in the chain carries that label.
     */
    public PatternElementMatch getFirstLabeledMatch(String label) {
        PatternElementMatch first = null;
        for (PatternElementMatch node = this; node != null; node = node.prev) {
            final String elementLabel = node.patternElement.getLabel();
            if (elementLabel != null && elementLabel.equals(label)) {
                // Keep overwriting: the last hit while walking prev links is the
                // first match in chain order.
                first = node;
            }
        }
        return first;
    }
}
// Control-dependency-annotated duplicate of the getFirstLabeledMatch snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public PatternElementMatch getFirstLabeledMatch(String label) { PatternElementMatch cur = this, result = null; while (cur != null) { String elementLabel = cur.patternElement.getLabel(); if (elementLabel != null && elementLabel.equals(label)) { result = cur; // depends on control dependency: [if], data = [none] } cur = cur.prev; // depends on control dependency: [while], data = [none] } return result; } }
// Returns a copy of this formatter adapted to the given locale. Short-circuits when the
// locale is unchanged; for generic calendar chronologies it rebuilds from the pattern
// (or from the display style when the pattern is empty); for generic moment overrides it
// rebuilds from the pattern; otherwise it clones with locale-adjusted global attributes.
// NOTE(review): branch order is significant (pattern vs. style probing) — kept verbatim.
public class class_name { @SuppressWarnings("unchecked") @Override public ChronoFormatter<T> with(Locale locale) { if (locale.equals(this.globalAttributes.getLocale())) { return this; } else if (this.chronology.getChronoType() == CalendarDate.class) { String pattern = this.getPattern(); if (pattern.isEmpty()) { FormatProcessor<?> processor = this.steps.get(0).getProcessor(); if (processor instanceof StyleProcessor) { DisplayMode style = DisplayMode.ofStyle( StyleProcessor.class.cast(processor).getDateStyle().getStyleValue()); return (ChronoFormatter<T>) ChronoFormatter.ofGenericCalendarStyle(style, locale); } } else { return (ChronoFormatter<T>) ChronoFormatter.ofGenericCalendarPattern(pattern, locale); } } else if ((this.overrideHandler != null) && this.overrideHandler.isGeneric()) { String pattern = this.getPattern(); if (!pattern.isEmpty()) { return (ChronoFormatter<T>) ChronoFormatter.ofGenericMomentPattern(pattern, locale); } } return new ChronoFormatter<>(this, this.globalAttributes.withLocale(locale)); } }
// Control-dependency-annotated duplicate of the with(Locale) snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { @SuppressWarnings("unchecked") @Override public ChronoFormatter<T> with(Locale locale) { if (locale.equals(this.globalAttributes.getLocale())) { return this; // depends on control dependency: [if], data = [none] } else if (this.chronology.getChronoType() == CalendarDate.class) { String pattern = this.getPattern(); if (pattern.isEmpty()) { FormatProcessor<?> processor = this.steps.get(0).getProcessor(); if (processor instanceof StyleProcessor) { DisplayMode style = DisplayMode.ofStyle( StyleProcessor.class.cast(processor).getDateStyle().getStyleValue()); return (ChronoFormatter<T>) ChronoFormatter.ofGenericCalendarStyle(style, locale); // depends on control dependency: [if], data = [none] } } else { return (ChronoFormatter<T>) ChronoFormatter.ofGenericCalendarPattern(pattern, locale); // depends on control dependency: [if], data = [none] } } else if ((this.overrideHandler != null) && this.overrideHandler.isGeneric()) { String pattern = this.getPattern(); if (!pattern.isEmpty()) { return (ChronoFormatter<T>) ChronoFormatter.ofGenericMomentPattern(pattern, locale); // depends on control dependency: [if], data = [none] } } return new ChronoFormatter<>(this, this.globalAttributes.withLocale(locale)); } }
public class class_name { public void checkParameterizables() { LoggingConfiguration.setVerbose(Level.VERBOSE); knownParameterizables = new ArrayList<>(); try { Enumeration<URL> us = getClass().getClassLoader().getResources(ELKIServiceLoader.RESOURCE_PREFIX); while(us.hasMoreElements()) { URL u = us.nextElement(); if("file".equals(u.getProtocol())) { for(String prop : new File(u.toURI()).list()) { try { knownParameterizables.add(Class.forName(prop)); } catch(ClassNotFoundException e) { LOG.warning("Service file name is not a class name: " + prop); continue; } } } else if(("jar".equals(u.getProtocol()))) { JarURLConnection con = (JarURLConnection) u.openConnection(); try (JarFile jar = con.getJarFile()) { Enumeration<JarEntry> entries = jar.entries(); while(entries.hasMoreElements()) { String prop = entries.nextElement().getName(); if(prop.startsWith(ELKIServiceLoader.RESOURCE_PREFIX)) { prop = prop.substring(ELKIServiceLoader.RESOURCE_PREFIX.length()); } else if(prop.startsWith(ELKIServiceLoader.FILENAME_PREFIX)) { prop = prop.substring(ELKIServiceLoader.FILENAME_PREFIX.length()); } else { continue; } try { knownParameterizables.add(Class.forName(prop)); } catch(ClassNotFoundException e) { LOG.warning("Service file name is not a class name: " + prop); continue; } } } } } } catch(IOException | URISyntaxException e) { throw new AbortException("Error enumerating service folders.", e); } final String internal = de.lmu.ifi.dbs.elki.utilities.optionhandling.Parameterizer.class.getPackage().getName(); for(final Class<?> cls : ELKIServiceRegistry.findAllImplementations(Object.class, false, false)) { // Classes in the same package are special and don't cause warnings. 
if(cls.getName().startsWith(internal)) { continue; } try { State state = State.NO_CONSTRUCTOR; state = checkV3Parameterization(cls, state); if(state == State.ERROR) { continue; } state = checkDefaultConstructor(cls, state); if(state == State.ERROR) { continue; } boolean expectedParameterizer = checkSupertypes(cls); if(state == State.NO_CONSTRUCTOR && expectedParameterizer) { LOG.verbose("Class " + cls.getName() + // " implements a parameterizable interface, but doesn't have a public and parameterless constructor!"); } if(state == State.INSTANTIABLE && !expectedParameterizer) { LOG.verbose("Class " + cls.getName() + // " has a parameterizer, but there is no service file for any of its interfaces."); } } catch(NoClassDefFoundError e) { LOG.verbose("Class discovered but not found: " + cls.getName() + " (missing: " + e.getMessage() + ")"); } } } }
// Control-dependency-annotated duplicate of the checkParameterizables snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public void checkParameterizables() { LoggingConfiguration.setVerbose(Level.VERBOSE); knownParameterizables = new ArrayList<>(); try { Enumeration<URL> us = getClass().getClassLoader().getResources(ELKIServiceLoader.RESOURCE_PREFIX); while(us.hasMoreElements()) { URL u = us.nextElement(); if("file".equals(u.getProtocol())) { for(String prop : new File(u.toURI()).list()) { try { knownParameterizables.add(Class.forName(prop)); // depends on control dependency: [try], data = [none] } catch(ClassNotFoundException e) { LOG.warning("Service file name is not a class name: " + prop); continue; } // depends on control dependency: [catch], data = [none] } } else if(("jar".equals(u.getProtocol()))) { JarURLConnection con = (JarURLConnection) u.openConnection(); try (JarFile jar = con.getJarFile()) { Enumeration<JarEntry> entries = jar.entries(); while(entries.hasMoreElements()) { String prop = entries.nextElement().getName(); if(prop.startsWith(ELKIServiceLoader.RESOURCE_PREFIX)) { prop = prop.substring(ELKIServiceLoader.RESOURCE_PREFIX.length()); // depends on control dependency: [if], data = [none] } else if(prop.startsWith(ELKIServiceLoader.FILENAME_PREFIX)) { prop = prop.substring(ELKIServiceLoader.FILENAME_PREFIX.length()); // depends on control dependency: [if], data = [none] } else { continue; } try { knownParameterizables.add(Class.forName(prop)); // depends on control dependency: [try], data = [none] } catch(ClassNotFoundException e) { LOG.warning("Service file name is not a class name: " + prop); continue; } // depends on control dependency: [catch], data = [none] } } } } } catch(IOException | URISyntaxException e) { throw new AbortException("Error enumerating service folders.", e); } // depends on control dependency: [catch], data = [none] final String internal = de.lmu.ifi.dbs.elki.utilities.optionhandling.Parameterizer.class.getPackage().getName(); for(final Class<?> cls : ELKIServiceRegistry.findAllImplementations(Object.class, false, 
false)) { // Classes in the same package are special and don't cause warnings. if(cls.getName().startsWith(internal)) { continue; } try { State state = State.NO_CONSTRUCTOR; state = checkV3Parameterization(cls, state); // depends on control dependency: [try], data = [none] if(state == State.ERROR) { continue; } state = checkDefaultConstructor(cls, state); // depends on control dependency: [try], data = [none] if(state == State.ERROR) { continue; } boolean expectedParameterizer = checkSupertypes(cls); if(state == State.NO_CONSTRUCTOR && expectedParameterizer) { LOG.verbose("Class " + cls.getName() + // " implements a parameterizable interface, but doesn't have a public and parameterless constructor!"); } if(state == State.INSTANTIABLE && !expectedParameterizer) { LOG.verbose("Class " + cls.getName() + // " has a parameterizer, but there is no service file for any of its interfaces."); // depends on control dependency: [if], data = [none] } } catch(NoClassDefFoundError e) { LOG.verbose("Class discovered but not found: " + cls.getName() + " (missing: " + e.getMessage() + ")"); } // depends on control dependency: [catch], data = [none] } } }
public class class_name {
    /**
     * Computes the date of Easter Sunday for the given year (Gregorian calendar,
     * Gauss/Lichtenberg algorithm) shifted by {@code days}, formatted as
     * {@code yyyy-MM-dd}.
     *
     * Fix: the previous version formatted the date to a String, re-parsed it with
     * SimpleDateFormat and swallowed the ParseException via printStackTrace; the
     * round-trip is removed and the Calendar is populated directly, so no checked
     * exception can occur and no legacy formatter is needed.
     *
     * @param year the Gregorian year
     * @param days offset in days relative to Easter Sunday (may be negative)
     * @return the resulting date as {@code yyyy-MM-dd}
     */
    public String getEasterSunday(int year, int days) {
        int K = year / 100;
        int M = 15 + ((3 * K + 3) / 4) - ((8 * K + 13) / 25);
        int S = 2 - ((3 * K + 3) / 4);
        int A = year % 19;
        int D = (19 * A + M) % 30;
        int R = (D / 29) + ((D / 28) - (D / 29) * (A / 11));
        int OG = 21 + D - R;                      // March date of the Easter full moon
        int SZ = 7 - (year + (year / 4) + S) % 7; // first Sunday in March
        int OE = 7 - (OG - SZ) % 7;               // offset to the following Sunday
        int OS = OG + OE;                         // Easter Sunday as a "March day" (may exceed 31)

        Calendar c = Calendar.getInstance();
        c.clear();
        if (OS <= 31) {
            c.set(year, Calendar.MARCH, OS);
        } else {
            c.set(year, Calendar.APRIL, OS - 31);
        }
        c.add(Calendar.DAY_OF_MONTH, days);
        return String.format("%04d-%02d-%02d",
                c.get(Calendar.YEAR), c.get(Calendar.MONTH) + 1, c.get(Calendar.DAY_OF_MONTH));
    }
}
// Control-dependency-annotated duplicate of the getEasterSunday snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public String getEasterSunday(int year, int days) { int K = year / 100; int M = 15 + ( ( 3 * K + 3 ) / 4 ) - ( ( 8 * K + 13 ) / 25 ); int S = 2 - ( (3 * K + 3) / 4 ); int A = year % 19; int D = ( 19 * A + M ) % 30; int R = ( D / 29) + ( ( D / 28 ) - ( D / 29 ) * ( A / 11 ) ); int OG = 21 + D - R; int SZ = 7 - ( year + ( year / 4 ) + S ) % 7; int OE = 7 - ( OG - SZ ) % 7; int OS = OG + OE; SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd"); Calendar c = Calendar.getInstance(); String date; if( OS <= 31 ) { date = String.format("%04d-03-%02d", year, OS); // depends on control dependency: [if], data = [none] } else{ date = String.format("%04d-04-%02d", year, ( OS - 31 ) ); // depends on control dependency: [if], data = [( OS] } try{ c.setTime(formatter.parse(date)); // depends on control dependency: [try], data = [none] c.add(Calendar.DAY_OF_MONTH, days); // depends on control dependency: [try], data = [none] date = formatter.format(c.getTime()); // depends on control dependency: [try], data = [none] } catch (ParseException e) { e.printStackTrace(); } // depends on control dependency: [catch], data = [none] return date; } }
public class class_name {
    /**
     * Fluent setter: appends the given name servers to this delegation set's list,
     * lazily creating the backing list sized to the number of varargs.
     *
     * @param nameServers name servers to append
     * @return this object, for call chaining
     */
    public DelegationSet withNameServers(String... nameServers) {
        if (this.nameServers == null) {
            setNameServers(new com.amazonaws.internal.SdkInternalList<String>(nameServers.length));
        }
        for (int i = 0; i < nameServers.length; i++) {
            this.nameServers.add(nameServers[i]);
        }
        return this;
    }
}
// Control-dependency-annotated duplicate of the withNameServers snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public DelegationSet withNameServers(String... nameServers) { if (this.nameServers == null) { setNameServers(new com.amazonaws.internal.SdkInternalList<String>(nameServers.length)); // depends on control dependency: [if], data = [none] } for (String ele : nameServers) { this.nameServers.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
// Serializes a GetBotVersionsRequest into the wire protocol by marshalling each field
// (name, nextToken, maxResults) through the given ProtocolMarshaller. Rejects a null
// request, and wraps any marshalling failure in an SdkClientException (cause preserved).
// NOTE(review): the field marshalling order is part of the generated protocol binding
// and is kept verbatim.
public class class_name { public void marshall(GetBotVersionsRequest getBotVersionsRequest, ProtocolMarshaller protocolMarshaller) { if (getBotVersionsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getBotVersionsRequest.getName(), NAME_BINDING); protocolMarshaller.marshall(getBotVersionsRequest.getNextToken(), NEXTTOKEN_BINDING); protocolMarshaller.marshall(getBotVersionsRequest.getMaxResults(), MAXRESULTS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
// Control-dependency-annotated duplicate of the marshall snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public void marshall(GetBotVersionsRequest getBotVersionsRequest, ProtocolMarshaller protocolMarshaller) { if (getBotVersionsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getBotVersionsRequest.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getBotVersionsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getBotVersionsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Writes each non-null element of {@code list} to the target file as its own line,
     * terminated by the given line separator.
     *
     * Fix: the writer was flushed after every single element, defeating output
     * buffering; it is now flushed once after the loop (try-with-resources close
     * flushes on close as well, so the final content is unchanged).
     *
     * @param list          elements to write; {@code null} elements are skipped
     * @param lineSeparator line terminator appended after each element
     * @param isAppend      whether to append to the file instead of truncating it
     * @param <T>           element type; written via {@code toString()}
     * @return the target file
     * @throws IORuntimeException on I/O failure
     */
    public <T> File writeLines(Collection<T> list, LineSeparator lineSeparator, boolean isAppend) throws IORuntimeException {
        try (PrintWriter writer = getPrintWriter(isAppend)) {
            for (T t : list) {
                if (null != t) {
                    writer.print(t.toString());
                    printNewLine(writer, lineSeparator);
                }
            }
            writer.flush();
        }
        return this.file;
    }
}
// Control-dependency-annotated duplicate of the writeLines snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public <T> File writeLines(Collection<T> list, LineSeparator lineSeparator, boolean isAppend) throws IORuntimeException { try (PrintWriter writer = getPrintWriter(isAppend)){ for (T t : list) { if (null != t) { writer.print(t.toString()); // depends on control dependency: [if], data = [none] printNewLine(writer, lineSeparator); // depends on control dependency: [if], data = [none] writer.flush(); // depends on control dependency: [if], data = [none] } } } return this.file; } }
// @PostConstruct bootstrap: loads the JSON config file (decrypting passwords when the
// config supports it), tolerates a missing file by starting from the empty config, merges
// in the classpath default config, writes out a freshly generated default (with login)
// when no file existed, validates the result, and publishes it via config.set before
// calling postInitialise. Any unrecoverable read/validation error is rethrown as
// IllegalStateException so the webapp refuses to start on a broken configuration.
// NOTE(review): error-handling vs. best-effort branches are deliberate — kept verbatim.
public class class_name { @SuppressWarnings("ProhibitedExceptionDeclared") @PostConstruct public void init() throws Exception { final String configFileLocation = getConfigFileLocation(); T fileConfig = getEmptyConfig(); boolean fileExists = false; log.info("Using {} as config file location", configFileLocation); try (Reader reader = new BufferedReader(new InputStreamReader(new FileInputStream(configFileLocation), "UTF-8"))) { fileConfig = mapper.readValue(reader, getConfigClass()); if (fileConfig instanceof PasswordsConfig<?>) { fileConfig = ((PasswordsConfig<T>) fileConfig).withDecryptedPasswords(textEncryptor); } fileExists = true; } catch (final FileNotFoundException e) { log.warn("Config file not found, using empty configuration object"); } catch (final IOException e) { log.error("Error reading config file at {}", configFileLocation); log.error("Recording stack trace", e); // throw this so we don't continue to start the webapp throw new IllegalStateException("Could not initialize configuration", e); } if (StringUtils.isNotBlank(defaultConfigFile)) { try (InputStream defaultConfigInputStream = getClass().getResourceAsStream(defaultConfigFile)) { if (defaultConfigInputStream != null) { final T defaultConfig = mapper.readValue(defaultConfigInputStream, getConfigClass()); fileConfig = fileConfig.merge(defaultConfig); if (!fileExists) { fileConfig = generateDefaultLogin(fileConfig); try { writeOutConfigFile(fileConfig, configFileLocation); } catch (final IOException e) { throw new IllegalStateException("Could not initialize configuration", e); } } } } catch (final IOException e) { log.error("Error reading default config file", e); // throw this so we don't continue to start the webapp throw new IllegalStateException("Could not initialize configuration", e); } } try { fileConfig.basicValidate("Root"); } catch (final ConfigException e) { log.error("Config validation failed in " + e); throw new IllegalStateException("Could not initialize configuration", e); } 
config.set(fileConfig); postInitialise(getConfig()); } }
// Control-dependency-annotated duplicate of the init() snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { @SuppressWarnings("ProhibitedExceptionDeclared") @PostConstruct public void init() throws Exception { final String configFileLocation = getConfigFileLocation(); T fileConfig = getEmptyConfig(); boolean fileExists = false; log.info("Using {} as config file location", configFileLocation); try (Reader reader = new BufferedReader(new InputStreamReader(new FileInputStream(configFileLocation), "UTF-8"))) { fileConfig = mapper.readValue(reader, getConfigClass()); if (fileConfig instanceof PasswordsConfig<?>) { fileConfig = ((PasswordsConfig<T>) fileConfig).withDecryptedPasswords(textEncryptor); // depends on control dependency: [if], data = [)] } fileExists = true; } catch (final FileNotFoundException e) { log.warn("Config file not found, using empty configuration object"); } catch (final IOException e) { log.error("Error reading config file at {}", configFileLocation); log.error("Recording stack trace", e); // throw this so we don't continue to start the webapp throw new IllegalStateException("Could not initialize configuration", e); } if (StringUtils.isNotBlank(defaultConfigFile)) { try (InputStream defaultConfigInputStream = getClass().getResourceAsStream(defaultConfigFile)) { if (defaultConfigInputStream != null) { final T defaultConfig = mapper.readValue(defaultConfigInputStream, getConfigClass()); fileConfig = fileConfig.merge(defaultConfig); // depends on control dependency: [if], data = [none] if (!fileExists) { fileConfig = generateDefaultLogin(fileConfig); // depends on control dependency: [if], data = [none] try { writeOutConfigFile(fileConfig, configFileLocation); // depends on control dependency: [try], data = [none] } catch (final IOException e) { throw new IllegalStateException("Could not initialize configuration", e); } // depends on control dependency: [catch], data = [none] } } } catch (final IOException e) { log.error("Error reading default config file", e); // throw this so we don't continue to start the webapp throw new 
IllegalStateException("Could not initialize configuration", e); } } try { fileConfig.basicValidate("Root"); } catch (final ConfigException e) { log.error("Config validation failed in " + e); throw new IllegalStateException("Could not initialize configuration", e); } config.set(fileConfig); postInitialise(getConfig()); } }
public class class_name {
    /**
     * Reads the object with the given id from the API endpoint {@code _id/{id}} and
     * maps the returned properties onto its annotated fields.
     *
     * @param id the object id; blank or null returns {@code null}
     * @return the object, or {@code null} when the id is blank
     */
    public <P extends ParaObject> P read(String id) {
        if (StringUtils.isBlank(id)) {
            return null;
        }
        final Map<String, Object> props = getEntity(invokeGet("_id/" + id, null), Map.class);
        return ParaObjectUtils.setAnnotatedFields(props);
    }
}
// Control-dependency-annotated duplicate of the read(id) snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public <P extends ParaObject> P read(String id) { if (StringUtils.isBlank(id)) { return null; // depends on control dependency: [if], data = [none] } Map<String, Object> data = getEntity(invokeGet("_id/".concat(id), null), Map.class); return ParaObjectUtils.setAnnotatedFields(data); } }
public class class_name {
    /**
     * Fetches the "/me" resource for the given access token and maps the JSON body to a
     * {@link BasicUser}. Connection failures are wrapped in a
     * ConnectionInitializationException; HTTP-level errors are delegated to
     * {@code checkAndHandleResponse}.
     *
     * @param accessToken bearer token; must not be null
     * @return the current user
     * @deprecated retained for backwards compatibility
     */
    @Deprecated
    BasicUser getCurrentUserBasic(AccessToken accessToken) {
        checkAccessTokenIsNotNull(accessToken);
        final StatusType httpStatus;
        final String responseBody;
        try {
            final Response meResponse = targetEndpoint.path("me")
                    .request(MediaType.APPLICATION_JSON)
                    .header("Authorization", BEARER + accessToken.getToken())
                    .property(ClientProperties.CONNECT_TIMEOUT, getConnectTimeout())
                    .property(ClientProperties.READ_TIMEOUT, getReadTimeout())
                    .get();
            httpStatus = meResponse.getStatusInfo();
            responseBody = meResponse.readEntity(String.class);
        } catch (ProcessingException cause) {
            throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, cause);
        }
        checkAndHandleResponse(responseBody, httpStatus, accessToken);
        return mapToType(responseBody, BasicUser.class);
    }
}
// Control-dependency-annotated duplicate of the getCurrentUserBasic snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { @Deprecated BasicUser getCurrentUserBasic(AccessToken accessToken) { checkAccessTokenIsNotNull(accessToken); StatusType status; String content; try { Response response = targetEndpoint.path("me").request(MediaType.APPLICATION_JSON) .header("Authorization", BEARER + accessToken.getToken()) .property(ClientProperties.CONNECT_TIMEOUT, getConnectTimeout()) .property(ClientProperties.READ_TIMEOUT, getReadTimeout()) .get(); status = response.getStatusInfo(); // depends on control dependency: [try], data = [none] content = response.readEntity(String.class); // depends on control dependency: [try], data = [none] } catch (ProcessingException e) { throw new ConnectionInitializationException(CONNECTION_SETUP_ERROR_STRING, e); } // depends on control dependency: [catch], data = [none] checkAndHandleResponse(content, status, accessToken); return mapToType(content, BasicUser.class); } }
public class class_name {
    /**
     * Removes and returns the cell at (row, column), unregistering every element it
     * contained and decrementing the cell count. Returns {@code null} — and changes
     * nothing — when the slot is already empty.
     */
    public GridCell<P> removeCellAt(int row, int column) {
        final GridCell<P> removed = this.cells[row][column];
        if (removed == null) {
            return null;
        }
        this.cells[row][column] = null;
        --this.cellCount;
        for (final P element : removed) {
            removeElement(element);
        }
        return removed;
    }
}
// Control-dependency-annotated duplicate of the removeCellAt snippet above;
// executable tokens are identical, annotations are machine-generated.
public class class_name { public GridCell<P> removeCellAt(int row, int column) { final GridCell<P> cell = this.cells[row][column]; if (cell != null) { this.cells[row][column] = null; // depends on control dependency: [if], data = [none] --this.cellCount; // depends on control dependency: [if], data = [none] for (final P element : cell) { removeElement(element); // depends on control dependency: [for], data = [element] } } return cell; } }
// Compiles a Lucee/CFML page source into a Page AST. First parses normally, retrying on
// ProcessingDirectiveException (which can change charset / logging / dot-notation case).
// If the result is a plain page but the file extension (or, for Lucee extensions, a
// literal component/interface tag found in the root output statements) suggests a
// component, the source is re-parsed wrapped in a <…script> tag; that result is used
// only if it actually yields a component/interface. A CFML component-extension file that
// still is neither component nor interface is rejected with a TemplateException.
// NOTE(review): retry loops and iterator order are essential to the directive handling —
// kept verbatim (source was flattened by the corpus extraction).
public class class_name { public Page transform(Factory factory, ConfigImpl config, PageSource ps, TagLib[] tlibs, FunctionLib[] flibs, boolean returnValue, boolean ignoreScopes) throws TemplateException, IOException { Page p; SourceCode sc; boolean writeLog = config.getExecutionLogEnabled(); Charset charset = config.getTemplateCharset(); boolean dotUpper = ps.getDialect() == CFMLEngine.DIALECT_CFML && ((MappingImpl) ps.getMapping()).getDotNotationUpperCase(); // parse regular while (true) { try { sc = new PageSourceCode(ps, charset, writeLog); p = transform(factory, config, sc, tlibs, flibs, ps.getResource().lastModified(), dotUpper, returnValue, ignoreScopes); break; } catch (ProcessingDirectiveException pde) { if (pde.getWriteLog() != null) writeLog = pde.getWriteLog().booleanValue(); if (pde.getDotNotationUpperCase() != null) dotUpper = pde.getDotNotationUpperCase().booleanValue(); if (!StringUtil.isEmpty(pde.getCharset())) charset = pde.getCharset(); } } // could it be a component? 
boolean isCFML = ps.getDialect() == CFMLEngine.DIALECT_CFML; boolean isCFMLCompExt = isCFML && Constants.isCFMLComponentExtension(ResourceUtil.getExtension(ps.getResource(), "")); boolean possibleUndetectedComponent = false; // we don't have a component or interface if (p.isPage()) { if (isCFML) possibleUndetectedComponent = isCFMLCompExt; else if (Constants.isLuceeComponentExtension(ResourceUtil.getExtension(ps.getResource(), ""))) { Expression expr; Statement stat; PrintOut po; LitString ls; List<Statement> statements = p.getStatements(); // check the root statements for component Iterator<Statement> it = statements.iterator(); String str; while (it.hasNext()) { stat = it.next(); if (stat instanceof PrintOut && (expr = ((PrintOut) stat).getExpr()) instanceof LitString) { ls = (LitString) expr; str = ls.getString(); if (str.indexOf(Constants.LUCEE_COMPONENT_TAG_NAME) != -1 || str.indexOf(Constants.LUCEE_INTERFACE_TAG_NAME) != -1 || str.indexOf(Constants.CFML_COMPONENT_TAG_NAME) != -1 // cfml name is supported as alias ) { possibleUndetectedComponent = true; break; } } } } } if (possibleUndetectedComponent) { Page _p; TagLibTag scriptTag = CFMLTransformer.getTLT(sc, isCFML ? 
Constants.CFML_SCRIPT_TAG_NAME : Constants.LUCEE_SCRIPT_TAG_NAME, config.getIdentification()); sc.setPos(0); SourceCode original = sc; // try inside a cfscript String text = "<" + scriptTag.getFullName() + ">" + original.getText() + "\n</" + scriptTag.getFullName() + ">"; sc = new PageSourceCode(ps, text, charset, writeLog); try { while (true) { if (sc == null) { sc = new PageSourceCode(ps, charset, writeLog); text = "<" + scriptTag.getFullName() + ">" + sc.getText() + "\n</" + scriptTag.getFullName() + ">"; sc = new PageSourceCode(ps, text, charset, writeLog); } try { _p = transform(factory, config, sc, tlibs, flibs, ps.getResource().lastModified(), dotUpper, returnValue, ignoreScopes); break; } catch (ProcessingDirectiveException pde) { if (pde.getWriteLog() != null) writeLog = pde.getWriteLog().booleanValue(); if (pde.getDotNotationUpperCase() != null) dotUpper = pde.getDotNotationUpperCase().booleanValue(); if (!StringUtil.isEmpty(pde.getCharset())) charset = pde.getCharset(); sc = null; } } } catch (ComponentTemplateException e) { throw e.getTemplateException(); } // we only use that result if it is a component now if (_p != null && !_p.isPage()) return _p; } if (isCFMLCompExt && !p.isComponent() && !p.isInterface()) { String msg = "template [" + ps.getDisplayPath() + "] must contain a component or an interface."; if (sc != null) throw new TemplateException(sc, msg); throw new TemplateException(msg); } return p; } }
public class class_name { public Page transform(Factory factory, ConfigImpl config, PageSource ps, TagLib[] tlibs, FunctionLib[] flibs, boolean returnValue, boolean ignoreScopes) throws TemplateException, IOException { Page p; SourceCode sc; boolean writeLog = config.getExecutionLogEnabled(); Charset charset = config.getTemplateCharset(); boolean dotUpper = ps.getDialect() == CFMLEngine.DIALECT_CFML && ((MappingImpl) ps.getMapping()).getDotNotationUpperCase(); // parse regular while (true) { try { sc = new PageSourceCode(ps, charset, writeLog); // depends on control dependency: [try], data = [none] p = transform(factory, config, sc, tlibs, flibs, ps.getResource().lastModified(), dotUpper, returnValue, ignoreScopes); // depends on control dependency: [try], data = [none] break; } catch (ProcessingDirectiveException pde) { if (pde.getWriteLog() != null) writeLog = pde.getWriteLog().booleanValue(); if (pde.getDotNotationUpperCase() != null) dotUpper = pde.getDotNotationUpperCase().booleanValue(); if (!StringUtil.isEmpty(pde.getCharset())) charset = pde.getCharset(); } // depends on control dependency: [catch], data = [none] } // could it be a component? 
boolean isCFML = ps.getDialect() == CFMLEngine.DIALECT_CFML; boolean isCFMLCompExt = isCFML && Constants.isCFMLComponentExtension(ResourceUtil.getExtension(ps.getResource(), "")); boolean possibleUndetectedComponent = false; // we don't have a component or interface if (p.isPage()) { if (isCFML) possibleUndetectedComponent = isCFMLCompExt; else if (Constants.isLuceeComponentExtension(ResourceUtil.getExtension(ps.getResource(), ""))) { Expression expr; Statement stat; PrintOut po; LitString ls; List<Statement> statements = p.getStatements(); // check the root statements for component Iterator<Statement> it = statements.iterator(); String str; while (it.hasNext()) { stat = it.next(); if (stat instanceof PrintOut && (expr = ((PrintOut) stat).getExpr()) instanceof LitString) { ls = (LitString) expr; str = ls.getString(); if (str.indexOf(Constants.LUCEE_COMPONENT_TAG_NAME) != -1 || str.indexOf(Constants.LUCEE_INTERFACE_TAG_NAME) != -1 || str.indexOf(Constants.CFML_COMPONENT_TAG_NAME) != -1 // cfml name is supported as alias ) { possibleUndetectedComponent = true; break; } } } } } if (possibleUndetectedComponent) { Page _p; TagLibTag scriptTag = CFMLTransformer.getTLT(sc, isCFML ? 
Constants.CFML_SCRIPT_TAG_NAME : Constants.LUCEE_SCRIPT_TAG_NAME, config.getIdentification()); sc.setPos(0); SourceCode original = sc; // try inside a cfscript String text = "<" + scriptTag.getFullName() + ">" + original.getText() + "\n</" + scriptTag.getFullName() + ">"; sc = new PageSourceCode(ps, text, charset, writeLog); try { while (true) { if (sc == null) { sc = new PageSourceCode(ps, charset, writeLog); text = "<" + scriptTag.getFullName() + ">" + sc.getText() + "\n</" + scriptTag.getFullName() + ">"; sc = new PageSourceCode(ps, text, charset, writeLog); } try { _p = transform(factory, config, sc, tlibs, flibs, ps.getResource().lastModified(), dotUpper, returnValue, ignoreScopes); break; } catch (ProcessingDirectiveException pde) { if (pde.getWriteLog() != null) writeLog = pde.getWriteLog().booleanValue(); if (pde.getDotNotationUpperCase() != null) dotUpper = pde.getDotNotationUpperCase().booleanValue(); if (!StringUtil.isEmpty(pde.getCharset())) charset = pde.getCharset(); sc = null; } } } catch (ComponentTemplateException e) { throw e.getTemplateException(); } // we only use that result if it is a component now if (_p != null && !_p.isPage()) return _p; } if (isCFMLCompExt && !p.isComponent() && !p.isInterface()) { String msg = "template [" + ps.getDisplayPath() + "] must contain a component or an interface."; if (sc != null) throw new TemplateException(sc, msg); throw new TemplateException(msg); } return p; } }
public class class_name {
    /**
     * Stops the given processor thread and waits up to 20 seconds for it to die.
     *
     * <p>Sets the thread's running flag to false, interrupts it, and joins with a
     * timeout. Banner lines are logged through the processor itself before and after.
     *
     * @param proc the processor thread to stop; also used as the log sink
     * @return {@code true} if no error occurred while waiting for termination
     */
    public static boolean stopProcess(final AbstractProcessorThread proc) {
        proc.info("************************************************************");
        proc.info(" * Stopping " + proc.getName());
        proc.info("************************************************************");
        proc.setRunning(false);
        proc.interrupt();
        boolean ok = true;
        try {
            // Give the processor up to 20 seconds to finish.
            proc.join(20 * 1000);
        } catch (final InterruptedException ignored) {
            // Restore the interrupt status so callers can still observe it.
            Thread.currentThread().interrupt();
        } catch (final Throwable t) {
            proc.error("Error waiting for processor termination");
            proc.error(t);
            ok = false;
        }
        proc.info("************************************************************");
        proc.info(" * " + proc.getName() + " terminated");
        proc.info("************************************************************");
        return ok;
    }
}
public class class_name { public static boolean stopProcess(final AbstractProcessorThread proc) { proc.info("************************************************************"); proc.info(" * Stopping " + proc.getName()); proc.info("************************************************************"); proc.setRunning(false); proc.interrupt(); boolean ok = true; try { proc.join(20 * 1000); // depends on control dependency: [try], data = [none] } catch (final InterruptedException ignored) { } catch (final Throwable t) { // depends on control dependency: [catch], data = [none] proc.error("Error waiting for processor termination"); proc.error(t); ok = false; } // depends on control dependency: [catch], data = [none] proc.info("************************************************************"); proc.info(" * " + proc.getName() + " terminated"); proc.info("************************************************************"); return ok; } }
public class class_name {
    /**
     * Builds a one-line availability report for an optional dependency.
     *
     * <p>Probes the classpath for {@code className}; when present, reports the
     * base package obtained by stripping the last {@code truncate} dot-separated
     * segments from the class name.
     *
     * @param friendlyName human-readable name of the dependency
     * @param className fully qualified class used to probe availability
     * @param truncate number of trailing package segments (plus class name) to strip
     * @return a formatted "+ name - bundle" line, or "- name - not available"
     */
    private static String getBundle(String friendlyName, String className, int truncate) {
        try {
            // Probe: throws if the class (or one of its dependencies) is missing.
            cl.loadClass(className);
            int start = className.length();
            for (int i = 0; i < truncate; ++i) {
                start = className.lastIndexOf('.', start - 1);
            }
            final String bundle = className.substring(0, start);
            return "+ " + friendlyName + align(friendlyName) + "- " + bundle;
        } catch (final ClassNotFoundException | NoClassDefFoundError ignored) {
            // Intentionally ignored: absence simply means the optional
            // dependency is not on the classpath; report it as unavailable.
        }
        return "- " + friendlyName + align(friendlyName) + "- not available";
    }
}
public class class_name { private static String getBundle(String friendlyName, String className, int truncate) { try { cl.loadClass(className); // depends on control dependency: [try], data = [none] int start = className.length(); for (int i = 0; i < truncate; ++i) start = className.lastIndexOf('.', start - 1); final String bundle = className.substring(0, start); return "+ " + friendlyName + align(friendlyName) + "- " + bundle; // depends on control dependency: [try], data = [none] } catch (final ClassNotFoundException e) {} // depends on control dependency: [catch], data = [none] catch (final NoClassDefFoundError e) {} // depends on control dependency: [catch], data = [none] return "- " + friendlyName + align(friendlyName) + "- not available"; } }
public class class_name { public static Point3D_F64 closestPoint(LineSegment3D_F64 line, Point3D_F64 pt, Point3D_F64 ret) { if( ret == null ) { ret = new Point3D_F64(); } double dx = pt.x - line.a.x; double dy = pt.y - line.a.y; double dz = pt.z - line.a.z; double slope_x = line.b.x - line.a.x; double slope_y = line.b.y - line.a.y; double slope_z = line.b.z - line.a.z; double n = (double) Math.sqrt(slope_x*slope_x + slope_y*slope_y + slope_z*slope_z); double d = (slope_x*dx + slope_y*dy + slope_z*dz) / n; // if it is past the end points just return one of the end points if( d <= 0 ) { ret.set(line.a); } else if( d >= n ) { ret.set(line.b); } else { ret.x = line.a.x + d * slope_x / n; ret.y = line.a.y + d * slope_y / n; ret.z = line.a.z + d * slope_z / n; } return ret; } }
public class class_name { public static Point3D_F64 closestPoint(LineSegment3D_F64 line, Point3D_F64 pt, Point3D_F64 ret) { if( ret == null ) { ret = new Point3D_F64(); // depends on control dependency: [if], data = [none] } double dx = pt.x - line.a.x; double dy = pt.y - line.a.y; double dz = pt.z - line.a.z; double slope_x = line.b.x - line.a.x; double slope_y = line.b.y - line.a.y; double slope_z = line.b.z - line.a.z; double n = (double) Math.sqrt(slope_x*slope_x + slope_y*slope_y + slope_z*slope_z); double d = (slope_x*dx + slope_y*dy + slope_z*dz) / n; // if it is past the end points just return one of the end points if( d <= 0 ) { ret.set(line.a); // depends on control dependency: [if], data = [none] } else if( d >= n ) { ret.set(line.b); // depends on control dependency: [if], data = [none] } else { ret.x = line.a.x + d * slope_x / n; // depends on control dependency: [if], data = [none] ret.y = line.a.y + d * slope_y / n; // depends on control dependency: [if], data = [none] ret.z = line.a.z + d * slope_z / n; // depends on control dependency: [if], data = [none] } return ret; } }
public class class_name { private ClientImpl createClientAndConnect() throws UnknownHostException, IOException { // Make client connections. ClientImpl clientTmp = (ClientImpl) ClientFactory.createClient(this.config); // ENG-6231: Only fail if we can't connect to any of the provided servers. boolean connectedAnything = false; for (String server : this.servers) { try { clientTmp.createConnection(server); connectedAnything = true; } catch (UnknownHostException e) { } catch (IOException e) { } } if (!connectedAnything) { try { clientTmp.close(); } catch (InterruptedException ie) {} throw new IOException("Unable to connect to VoltDB cluster with servers: " + this.servers); } this.client.set(clientTmp); this.users++; return clientTmp; } }
public class class_name { private ClientImpl createClientAndConnect() throws UnknownHostException, IOException { // Make client connections. ClientImpl clientTmp = (ClientImpl) ClientFactory.createClient(this.config); // ENG-6231: Only fail if we can't connect to any of the provided servers. boolean connectedAnything = false; for (String server : this.servers) { try { clientTmp.createConnection(server); // depends on control dependency: [try], data = [none] connectedAnything = true; // depends on control dependency: [try], data = [none] } catch (UnknownHostException e) { } // depends on control dependency: [catch], data = [none] catch (IOException e) { } // depends on control dependency: [catch], data = [none] } if (!connectedAnything) { try { clientTmp.close(); // depends on control dependency: [try], data = [none] } catch (InterruptedException ie) {} // depends on control dependency: [catch], data = [none] throw new IOException("Unable to connect to VoltDB cluster with servers: " + this.servers); } this.client.set(clientTmp); this.users++; return clientTmp; } }
public class class_name { public void setFilter(UnicodeFilter filter) { if (filter == null) { this.filter = null; } else { try { // fast high-runner case this.filter = new UnicodeSet((UnicodeSet)filter).freeze(); } catch (Exception e) { this.filter = new UnicodeSet(); filter.addMatchSetTo(this.filter); this.filter.freeze(); } } } }
public class class_name { public void setFilter(UnicodeFilter filter) { if (filter == null) { this.filter = null; // depends on control dependency: [if], data = [none] } else { try { // fast high-runner case this.filter = new UnicodeSet((UnicodeSet)filter).freeze(); // depends on control dependency: [try], data = [none] } catch (Exception e) { this.filter = new UnicodeSet(); filter.addMatchSetTo(this.filter); this.filter.freeze(); } // depends on control dependency: [catch], data = [none] } } }
public class class_name {
    /**
     * Resolves the processor name for the given entity class and "thing" name.
     *
     * @param pClass entity class the request is about
     * @param pThingName either "list" or "about"
     * @return simple name of the processor class, or {@code null} when unknown
     */
    @Override
    public final String getFor(final Class<?> pClass, final String pThingName) {
        if ("about".equals(pThingName)) {
            return PrcAbout.class.getSimpleName();
        }
        if (!"list".equals(pThingName)) {
            // Neither "list" nor "about": no processor applies.
            return null;
        }
        // Entities whose list page needs sub-account type handling.
        final boolean needsSubaccTypes = pClass == PaymentFrom.class
                || pClass == PaymentTo.class
                || pClass == PrepaymentFrom.class
                || pClass == PrepaymentTo.class
                || pClass == SubaccountLine.class
                || pClass == AdditionCostLine.class
                || pClass == Account.class;
        return needsSubaccTypes
                ? PrcPageWithSubaccTypes.class.getSimpleName()
                : PrcEntitiesPage.class.getSimpleName();
    }
}
public class class_name { @Override public final String getFor(final Class<?> pClass, final String pThingName) { if ("list".equals(pThingName)) { if (pClass == PaymentFrom.class || pClass == PaymentTo.class || pClass == PrepaymentFrom.class || pClass == PrepaymentTo.class || pClass == SubaccountLine.class || pClass == AdditionCostLine.class || pClass == Account.class) { return PrcPageWithSubaccTypes.class.getSimpleName(); // depends on control dependency: [if], data = [none] } else { return PrcEntitiesPage.class.getSimpleName(); // depends on control dependency: [if], data = [none] } } else if ("about".equals(pThingName)) { return PrcAbout.class.getSimpleName(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { public static @Nullable Dimension getDimension(@NotNull Rendition rendition, boolean suppressLogWarningNoRenditionsMetadata) { String fileExtension = FilenameUtils.getExtension(getFilename(rendition)); if (!FileExtension.isImage(fileExtension)) { // this is not a supported image file extension - skip further processing return null; } // get image width/height Dimension dimension = null; boolean isOriginal = isOriginal(rendition); if (isOriginal) { // get width/height from metadata for original renditions dimension = getDimensionFromOriginal(rendition); } else { // otherwise get from rendition metadata written by {@link DamRenditionMetadataService} dimension = getDimensionFromRenditionMetadata(rendition); } // fallback: if width/height could not be read from either asset or rendition metadata load the image // into memory and get width/height from there - but log an warning because this is inefficient if (dimension == null) { dimension = getDimensionFromImageBinary(rendition, suppressLogWarningNoRenditionsMetadata); } return dimension; } }
public class class_name { public static @Nullable Dimension getDimension(@NotNull Rendition rendition, boolean suppressLogWarningNoRenditionsMetadata) { String fileExtension = FilenameUtils.getExtension(getFilename(rendition)); if (!FileExtension.isImage(fileExtension)) { // this is not a supported image file extension - skip further processing return null; // depends on control dependency: [if], data = [none] } // get image width/height Dimension dimension = null; boolean isOriginal = isOriginal(rendition); if (isOriginal) { // get width/height from metadata for original renditions dimension = getDimensionFromOriginal(rendition); // depends on control dependency: [if], data = [none] } else { // otherwise get from rendition metadata written by {@link DamRenditionMetadataService} dimension = getDimensionFromRenditionMetadata(rendition); // depends on control dependency: [if], data = [none] } // fallback: if width/height could not be read from either asset or rendition metadata load the image // into memory and get width/height from there - but log an warning because this is inefficient if (dimension == null) { dimension = getDimensionFromImageBinary(rendition, suppressLogWarningNoRenditionsMetadata); // depends on control dependency: [if], data = [none] } return dimension; } }
public class class_name {
    /**
     * Reads the bit at {@code position}, then applies {@code operation}
     * (SET/AND/OR/XOR - presumably class constants; confirm) with {@code value}.
     *
     * @return the bit's value BEFORE the operation was applied
     */
    private boolean getThenPerformAdj(int operation, int position, boolean value) {
        checkMutable();
        final int index = position >> ADDRESS_BITS;       // word holding the bit
        final long mask = 1L << (position & ADDRESS_MASK); // bit within the word
        // Capture the previous value before any mutation below.
        final boolean previous = (bits[index] & mask) != 0L;
        switch (operation) {
            case SET:
                // Unconditionally store 'value'.
                if (value) {
                    bits[index] |= mask;
                } else {
                    bits[index] &= ~mask;
                }
                break;
            case AND:
                // AND with true is a no-op; AND with false clears.
                if (!value) {
                    bits[index] &= ~mask;
                }
                break;
            case OR:
                // OR with false is a no-op; OR with true sets.
                if (value) {
                    bits[index] |= mask;
                }
                break;
            case XOR:
                // XOR with false is a no-op; XOR with true toggles.
                if (value) {
                    bits[index] ^= mask;
                }
                break;
        }
        return previous;
    }
}
public class class_name { private boolean getThenPerformAdj(int operation, int position, boolean value) { checkMutable(); final int i = position >> ADDRESS_BITS; final long m = 1L << (position & ADDRESS_MASK); final long v = bits[i] & m; switch(operation) { case SET : if (value) { bits[i] |= m; // depends on control dependency: [if], data = [none] } else { bits[i] &= ~m; // depends on control dependency: [if], data = [none] } break; case AND : if (value) { /* no-op */ } else { bits[i] &= ~m; // depends on control dependency: [if], data = [none] } break; case OR : if (value) { bits[i] |= m; // depends on control dependency: [if], data = [none] } else { /* no-op */ } break; case XOR : if (value) { bits[i] ^= m; // depends on control dependency: [if], data = [none] } else { /* no-op */ } break; } return v != 0; } }
public class class_name { public static <T> T fromXml(File xmlPath, Class<T> type) { BufferedReader reader = null; StringBuilder sb = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(xmlPath), ENCODING)); String line = null; sb = new StringBuilder(); while ((line = reader.readLine()) != null) { sb.append(line + "\n"); } } catch (FileNotFoundException e) { throw new IllegalArgumentException(e.getMessage()); } catch (Exception e) { throw new RuntimeException(e); } finally { if (reader != null) { try { reader.close(); } catch (IOException e) { //ignore } } } return fromXml(sb.toString(), type); } }
public class class_name { public static <T> T fromXml(File xmlPath, Class<T> type) { BufferedReader reader = null; StringBuilder sb = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(xmlPath), ENCODING)); // depends on control dependency: [try], data = [none] String line = null; sb = new StringBuilder(); // depends on control dependency: [try], data = [none] while ((line = reader.readLine()) != null) { sb.append(line + "\n"); // depends on control dependency: [while], data = [none] } } catch (FileNotFoundException e) { throw new IllegalArgumentException(e.getMessage()); } catch (Exception e) { // depends on control dependency: [catch], data = [none] throw new RuntimeException(e); } finally { // depends on control dependency: [catch], data = [none] if (reader != null) { try { reader.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { //ignore } // depends on control dependency: [catch], data = [none] } } return fromXml(sb.toString(), type); } }
public class class_name {
    // Releases one hold on this reentrant lock. Only the owning thread may
    // release; otherwise IllegalMonitorStateException is thrown.
    public final void release() {
        synchronized (this.LOCK) {
            // Case 1: calling thread is the current owner - decrement the
            // reentrant acquire count and wake one waiter when it hits zero.
            if ((this.owner != null) && (this.owner.get() == Thread.currentThread())) {
                this.acquire--;
                if (this.acquire == 0) {
                    this.owner = null;
                    this.LOCK.notify();
                }
                return;
            }
            // Case 2: the owner reference has been cleared (this.owner looks
            // like a Reference<Thread> whose referent died - TODO confirm):
            // reset the lock state and wake a waiter.
            // NOTE(review): this branch does NOT return - it falls through to
            // the exception below, which will then report "nobody" as the
            // owner because this.owner was just nulled. Confirm this
            // clean-up-then-throw behavior is intended.
            if ((this.owner != null) && (this.owner.get() == null)) {
                this.owner = null;
                this.acquire = 0;
                this.LOCK.notify();
            }
            // Any other caller does not hold the lock.
            throw new IllegalMonitorStateException(Thread.currentThread()
                    .getName()
                    + " is not the owner of the lock. "
                    + ((this.owner != null) ? ((Thread) this.owner.get())
                            .getName() : "nobody") + " is the owner.");
        }
    }
}
public class class_name { public final void release() { synchronized (this.LOCK) { if ((this.owner != null) && (this.owner.get() == Thread.currentThread())) { this.acquire--; // depends on control dependency: [if], data = [none] if (this.acquire == 0) { this.owner = null; // depends on control dependency: [if], data = [none] this.LOCK.notify(); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } if ((this.owner != null) && (this.owner.get() == null)) { this.owner = null; // depends on control dependency: [if], data = [none] this.acquire = 0; // depends on control dependency: [if], data = [none] this.LOCK.notify(); // depends on control dependency: [if], data = [none] } throw new IllegalMonitorStateException(Thread.currentThread() .getName() + " is not the owner of the lock. " + ((this.owner != null) ? ((Thread) this.owner.get()) .getName() : "nobody") + " is the owner."); } } }
public class class_name { @Override public void shutdown() { log.info("Shutting down Ontology crawlers."); this.executor.shutdown(); // Disable new tasks from being submitted try { // Wait a while for existing tasks to terminate if (!this.executor.awaitTermination(5, TimeUnit.SECONDS)) { this.executor.shutdownNow(); // Cancel currently executing tasks // Wait a while for tasks to respond to being cancelled if (!this.executor.awaitTermination(2, TimeUnit.SECONDS)) log.error("Pool did not terminate"); } } catch (InterruptedException ie) { // (Re-)Cancel if current thread also interrupted this.executor.shutdownNow(); // Preserve interrupt status Thread.currentThread().interrupt(); } } }
public class class_name { @Override public void shutdown() { log.info("Shutting down Ontology crawlers."); this.executor.shutdown(); // Disable new tasks from being submitted try { // Wait a while for existing tasks to terminate if (!this.executor.awaitTermination(5, TimeUnit.SECONDS)) { this.executor.shutdownNow(); // Cancel currently executing tasks // depends on control dependency: [if], data = [none] // Wait a while for tasks to respond to being cancelled if (!this.executor.awaitTermination(2, TimeUnit.SECONDS)) log.error("Pool did not terminate"); } } catch (InterruptedException ie) { // (Re-)Cancel if current thread also interrupted this.executor.shutdownNow(); // Preserve interrupt status Thread.currentThread().interrupt(); } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    // Generates the Java source of the script-builder INTERFACE for the target
    // language and writes it into the src-gen output folder. The interface body
    // (Javadoc header, @SuppressWarnings, fields/methods, top elements) is
    // emitted via a StringConcatenationClient.
    protected void generateIScriptBuilder() {
        // Top-level elements to embed in the interface (interface variant: true, false).
        final List<StringConcatenationClient> topElements = generateTopElements(true, false);
        final TypeReference builder = getScriptBuilderInterface();
        final StringConcatenationClient content = new StringConcatenationClient() {
            @Override
            protected void appendTo(TargetStringConcatenation it) {
                // Emit the interface's Javadoc header.
                it.append("/** Builder of " + getLanguageName() + " scripts."); //$NON-NLS-1$ //$NON-NLS-2$
                it.newLine();
                it.append(" *"); //$NON-NLS-1$
                it.newLine();
                it.append(" * <p>This builder is provided for helping to create " //$NON-NLS-1$
                        + getLanguageName() + " Ecore elements."); //$NON-NLS-1$
                it.newLine();
                it.append(" *"); //$NON-NLS-1$
                it.newLine();
                it.append(" * <p>Do not forget to invoke {@link #finalizeScript()} for creating imports, etc."); //$NON-NLS-1$
                it.newLine();
                it.append(" */"); //$NON-NLS-1$
                it.newLine();
                it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$
                it.newLine();
                // Interface declaration line.
                it.append("public interface "); //$NON-NLS-1$
                it.append(builder.getSimpleName());
                it.append(" {"); //$NON-NLS-1$
                it.newLineIfNotEmpty();
                it.newLine();
                // Interface members, then the previously collected top elements.
                it.append(generateFieldsAndMethods(true, false));
                for (final StringConcatenationClient element : topElements) {
                    it.append(element);
                }
                it.append("}"); //$NON-NLS-1$
                it.newLineIfNotEmpty();
                it.newLine();
            }
        };
        // Materialize the Java file and write it to the generated-sources folder.
        final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(builder, content);
        javaFile.writeTo(getSrcGen());
    }
}
public class class_name { protected void generateIScriptBuilder() { final List<StringConcatenationClient> topElements = generateTopElements(true, false); final TypeReference builder = getScriptBuilderInterface(); final StringConcatenationClient content = new StringConcatenationClient() { @Override protected void appendTo(TargetStringConcatenation it) { it.append("/** Builder of " + getLanguageName() + " scripts."); //$NON-NLS-1$ //$NON-NLS-2$ it.newLine(); it.append(" *"); //$NON-NLS-1$ it.newLine(); it.append(" * <p>This builder is provided for helping to create " //$NON-NLS-1$ + getLanguageName() + " Ecore elements."); //$NON-NLS-1$ it.newLine(); it.append(" *"); //$NON-NLS-1$ it.newLine(); it.append(" * <p>Do not forget to invoke {@link #finalizeScript()} for creating imports, etc."); //$NON-NLS-1$ it.newLine(); it.append(" */"); //$NON-NLS-1$ it.newLine(); it.append("@SuppressWarnings(\"all\")"); //$NON-NLS-1$ it.newLine(); it.append("public interface "); //$NON-NLS-1$ it.append(builder.getSimpleName()); it.append(" {"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); it.append(generateFieldsAndMethods(true, false)); for (final StringConcatenationClient element : topElements) { it.append(element); // depends on control dependency: [for], data = [element] } it.append("}"); //$NON-NLS-1$ it.newLineIfNotEmpty(); it.newLine(); } }; final JavaFileAccess javaFile = getFileAccessFactory().createJavaFile(builder, content); javaFile.writeTo(getSrcGen()); } }
public class class_name { @Override public void doRender(final WComponent component, final WebXmlRenderContext renderContext) { WSelectToggle toggle = (WSelectToggle) component; XmlStringBuilder xml = renderContext.getWriter(); xml.appendTagOpen("ui:selecttoggle"); xml.appendAttribute("id", component.getId()); xml.appendOptionalAttribute("class", component.getHtmlClass()); xml.appendOptionalAttribute("track", component.isTracking(), "true"); State state = toggle.getState(); if (State.ALL.equals(state)) { xml.appendAttribute("selected", "all"); } else if (State.NONE.equals(state)) { xml.appendAttribute("selected", "none"); } else { xml.appendAttribute("selected", "some"); } xml.appendOptionalAttribute("disabled", toggle.isDisabled(), "true"); xml.appendAttribute("target", toggle.getTarget().getId()); xml.appendAttribute("renderAs", toggle.isRenderAsText() ? "text" : "control"); xml.appendOptionalAttribute("roundTrip", !toggle.isClientSide(), "true"); xml.appendEnd(); } }
public class class_name { @Override public void doRender(final WComponent component, final WebXmlRenderContext renderContext) { WSelectToggle toggle = (WSelectToggle) component; XmlStringBuilder xml = renderContext.getWriter(); xml.appendTagOpen("ui:selecttoggle"); xml.appendAttribute("id", component.getId()); xml.appendOptionalAttribute("class", component.getHtmlClass()); xml.appendOptionalAttribute("track", component.isTracking(), "true"); State state = toggle.getState(); if (State.ALL.equals(state)) { xml.appendAttribute("selected", "all"); // depends on control dependency: [if], data = [none] } else if (State.NONE.equals(state)) { xml.appendAttribute("selected", "none"); // depends on control dependency: [if], data = [none] } else { xml.appendAttribute("selected", "some"); // depends on control dependency: [if], data = [none] } xml.appendOptionalAttribute("disabled", toggle.isDisabled(), "true"); xml.appendAttribute("target", toggle.getTarget().getId()); xml.appendAttribute("renderAs", toggle.isRenderAsText() ? "text" : "control"); xml.appendOptionalAttribute("roundTrip", !toggle.isClientSide(), "true"); xml.appendEnd(); } }
public class class_name {
    /**
     * Removes a packet handler, keeping the handler count in sync.
     *
     * @param handler handler to remove
     * @return {@code true} if the handler was registered and has been removed
     */
    public boolean removeHandler(PacketHandler handler) {
        synchronized (this.handlers) {
            if (!this.handlers.remove(handler)) {
                // Not registered - count stays untouched.
                return false;
            }
            this.count.decrementAndGet();
            return true;
        }
    }
}
public class class_name { public boolean removeHandler(PacketHandler handler) { synchronized (this.handlers) { boolean removed = this.handlers.remove(handler); if (removed) { this.count.decrementAndGet(); // depends on control dependency: [if], data = [none] } return removed; } } }
public class class_name {
    /**
     * Returns the list of unsuccessful items, lazily creating it on first access
     * so callers never see {@code null}.
     */
    public java.util.List<UnsuccessfulItem> getUnsuccessful() {
        if (unsuccessful != null) {
            return unsuccessful;
        }
        // First access: initialize the backing SDK list.
        unsuccessful = new com.amazonaws.internal.SdkInternalList<UnsuccessfulItem>();
        return unsuccessful;
    }
}
public class class_name { public java.util.List<UnsuccessfulItem> getUnsuccessful() { if (unsuccessful == null) { unsuccessful = new com.amazonaws.internal.SdkInternalList<UnsuccessfulItem>(); // depends on control dependency: [if], data = [none] } return unsuccessful; } }
public class class_name {
    /**
     * Verifies that every proposition id in {@code propIdsAsSet} resolves to a
     * proposition definition in the knowledge source.
     *
     * @throws LinkValidationFailedException if any id could not be resolved;
     *         the message lists the invalid ids
     * @throws KnowledgeSourceReadException if the knowledge source cannot be read
     */
    void validate(KnowledgeSource knowledgeSource)
            throws LinkValidationFailedException, KnowledgeSourceReadException {
        // Resolve all requested ids in a single knowledge-source call.
        String[] requested = this.propIdsAsSet.toArray(new String[this.propIdsAsSet.size()]);
        List<PropositionDefinition> propDefs =
                knowledgeSource.readPropositionDefinitions(requested);
        Set<String> resolvedIds = new HashSet<>();
        for (PropositionDefinition propDef : propDefs) {
            resolvedIds.add(propDef.getId());
        }
        // Collect unresolved ids, preserving the original set's iteration order
        // for a stable error message.
        List<String> invalidPropIds = new ArrayList<>();
        for (String propId : this.propIdsAsSet) {
            if (!resolvedIds.contains(propId)) {
                invalidPropIds.add(propId);
            }
        }
        if (!invalidPropIds.isEmpty()) {
            throw new LinkValidationFailedException(
                    "Invalid proposition id(s): " + StringUtils.join(invalidPropIds, ", "));
        }
    }
}
public class class_name { void validate(KnowledgeSource knowledgeSource) throws LinkValidationFailedException, KnowledgeSourceReadException { List<String> invalidPropIds = new ArrayList<>(); List<PropositionDefinition> propDefs = knowledgeSource.readPropositionDefinitions(this.propIdsAsSet.toArray(new String[this.propIdsAsSet.size()])); Set<String> foundPropIds = new HashSet<>(); for (PropositionDefinition propDef : propDefs) { foundPropIds.add(propDef.getId()); } for (String propId : this.propIdsAsSet) { if (!foundPropIds.contains(propId)) { invalidPropIds.add(propId); // depends on control dependency: [if], data = [none] } } if (!invalidPropIds.isEmpty()) { throw new LinkValidationFailedException( "Invalid proposition id(s): " + StringUtils.join(invalidPropIds, ", ")); } } }
public class class_name {
    /**
     * Consumes the "-schema" argument (removing it from {@code clArgs}) and
     * builds the schema settings for the given command.
     *
     * <p>Exits the process with status 1 when the provided options are invalid.
     *
     * @throws IllegalArgumentException when used with the 'user' command,
     *         which takes a schema yaml instead
     */
    public static SettingsSchema get(Map<String, String[]> clArgs, SettingsCommand command) {
        String[] params = clArgs.remove("-schema");
        if (params == null) {
            // No explicit schema options: use the defaults.
            return new SettingsSchema(new Options(), command);
        }
        if (command instanceof SettingsCommandUser) {
            throw new IllegalArgumentException("-schema can only be provided with predefined operations insert, read, etc.; the 'user' command requires a schema yaml instead");
        }
        GroupedOptions options = GroupedOptions.select(params, new Options());
        if (options == null) {
            // Unrecognized options: print usage and abort the process.
            printHelp();
            System.out.println("Invalid -schema options provided, see output for valid options");
            System.exit(1);
        }
        return new SettingsSchema((Options) options, command);
    }
}
public class class_name { public static SettingsSchema get(Map<String, String[]> clArgs, SettingsCommand command) { String[] params = clArgs.remove("-schema"); if (params == null) return new SettingsSchema(new Options(), command); if (command instanceof SettingsCommandUser) throw new IllegalArgumentException("-schema can only be provided with predefined operations insert, read, etc.; the 'user' command requires a schema yaml instead"); GroupedOptions options = GroupedOptions.select(params, new Options()); if (options == null) { printHelp(); // depends on control dependency: [if], data = [none] System.out.println("Invalid -schema options provided, see output for valid options"); // depends on control dependency: [if], data = [none] System.exit(1); // depends on control dependency: [if], data = [none] } return new SettingsSchema((Options) options, command); } }
public class class_name {
    /**
     * Reduces the given instruction list while holding the instance lock,
     * so concurrent reductions cannot interleave.
     *
     * @param instructions instructions to reduce; also cached on the instance
     * @return the reduced instruction list
     */
    public List<Instruction> reduceInstructions(final List<Instruction> instructions) {
        lock.lock();
        try {
            // Remember the list currently being processed.
            this.instructions = instructions;
            // Pre-compute stack sizes; the reduction below relies on them.
            stackSizeSimulator.buildStackSizes(instructions);
            return reduceInstructionsInternal(instructions);
        } finally {
            // Always release, even if reduction throws.
            lock.unlock();
        }
    }
}
public class class_name { public List<Instruction> reduceInstructions(final List<Instruction> instructions) { lock.lock(); try { this.instructions = instructions; // depends on control dependency: [try], data = [none] stackSizeSimulator.buildStackSizes(instructions); // depends on control dependency: [try], data = [none] return reduceInstructionsInternal(instructions); // depends on control dependency: [try], data = [none] } finally { lock.unlock(); } } }
public class class_name {
    // Writes one byte into the ring buffer, blocking (via a 1 ms sleep-spin)
    // until a slot is free. Throws if the stream has been closed.
    public void write(int b) throws IOException {
        // Spin: acquire the write lock, try to store, otherwise sleep briefly
        // and retry. The lock is released before sleeping so readers can drain.
        while (true) {
            lockW.lock();
            try {
                // fail if closed
                checkClosed();
                // Free space = capacity minus bytes currently available to read.
                if (length - availableCount.get() > 0) {
                    bytes[writeCursor] = (byte) b;
                    incWriteCursor(1);
                    // Publish the byte to readers only after it is stored.
                    availableCount.incrementAndGet();
                    totalWritten++;
                    return;
                }
            } finally {
                lockW.unlock();
            }
            // Buffer full: back off for 1 ms before retrying.
            sleep(1);
        }
    }
}
public class class_name { public void write(int b) throws IOException { while (true) { lockW.lock(); try { // fail if closed checkClosed(); // depends on control dependency: [try], data = [none] if (length - availableCount.get() > 0) { bytes[writeCursor] = (byte) b; // depends on control dependency: [if], data = [none] incWriteCursor(1); // depends on control dependency: [if], data = [none] availableCount.incrementAndGet(); // depends on control dependency: [if], data = [none] totalWritten++; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } finally { lockW.unlock(); } sleep(1); } } }
public class class_name {
    /**
     * Looks up the key stored under {@code alias} and returns it as a
     * {@link PrivateKey}.
     *
     * @param alias    the key alias
     * @param password the key password
     * @return the private key stored under the alias
     * @throws IllegalStateException if the stored key is not a private key
     */
    public PrivateKey getPrivateKey(String alias, String password) {
        final Key key = getKey(alias, password);
        // Guard clause: anything other than a PrivateKey is a configuration error.
        if (!(key instanceof PrivateKey)) {
            throw new IllegalStateException(format("Key with alias '%s' was not a private key, but was: %s",
                    alias, key.getClass().getSimpleName()));
        }
        return (PrivateKey) key;
    }
}
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { public PrivateKey getPrivateKey(String alias, String password) { Key key = getKey(alias, password); if (key instanceof PrivateKey) { return (PrivateKey) key; // depends on control dependency: [if], data = [none] } else { throw new IllegalStateException(format("Key with alias '%s' was not a private key, but was: %s", alias, key.getClass().getSimpleName())); } } }
public class class_name {
    /**
     * Finds the first registered serialization format that accepts
     * {@code name} as an output-format name.
     *
     * @param name the format name to look up
     * @return the matching format, or {@code null} when none accepts the name
     */
    public static SerializationFormat getOutputFormat(String name) {
        SerializationFormat match = null;
        for (SerializationFormat candidate : Instance.serializationFormats) {
            if (candidate.isAcceptedAsOutput(name)) {
                match = candidate;
                break;
            }
        }
        return match;
    }
}
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { public static SerializationFormat getOutputFormat(String name) { for (SerializationFormat ft : Instance.serializationFormats) { if (ft.isAcceptedAsOutput(name)) { return ft; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name {
    /**
     * Indexes the given annotations by their annotation type in the
     * instance-level {@code annotations} map.
     *
     * @param annotations the annotations to index
     */
    private void populateAnnotations(Collection<Annotation> annotations) {
        annotations.forEach(each -> this.annotations.put(each.annotationType(), each));
    }
}
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { private void populateAnnotations(Collection<Annotation> annotations) { for (Annotation each : annotations) { this.annotations.put(each.annotationType(), each); // depends on control dependency: [for], data = [each] } } }
public class class_name {
    /**
     * Dumps the contents of the given {@link Reflections} store to the logger:
     * one banner per index, then one line per key with its collected values.
     *
     * @param reflections the scanner whose backing {@code Store} is printed
     */
    public static void printStoreMap(Reflections reflections) {
        LOGGER.info("Now we will print store map......");
        Store store = reflections.getStore();
        Map<String/* indexName */, Multimap<String, String>> storeMap = store.getStoreMap();
        // Iterate entries directly instead of keySet()+get(): one map lookup per index.
        for (Map.Entry<String, Multimap<String, String>> index : storeMap.entrySet()) {
            LOGGER.info("====================================");
            LOGGER.info("indexName:" + index.getKey());
            // Multimap.asMap() exposes each key with all of its values in a single entry,
            // avoiding the per-key multimap.get() of the original loop.
            for (Map.Entry<String, Collection<String>> byKey : index.getValue().asMap().entrySet()) {
                LOGGER.info("\t\t" + byKey.getKey() + ": " + byKey.getValue());
            }
        }
    }
}
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { public static void printStoreMap(Reflections reflections) { LOGGER.info("Now we will print store map......"); Store store = reflections.getStore(); Map<String/* indexName */, Multimap<String, String>> storeMap = store.getStoreMap(); for (String indexName : storeMap.keySet()) { LOGGER.info("===================================="); // depends on control dependency: [for], data = [none] LOGGER.info("indexName:" + indexName); // depends on control dependency: [for], data = [indexName] Multimap<String, String> multimap = storeMap.get(indexName); for (String firstName : multimap.keySet()) { Collection<String> lastNames = multimap.get(firstName); LOGGER.info("\t\t" + firstName + ": " + lastNames); // depends on control dependency: [for], data = [firstName] } } } }
public class class_name { Optional<String> writeMeter(Meter meter) { Iterable<Measurement> measurements = meter.measure(); List<String> names = new ArrayList<>(); // Snapshot values should be used throughout this method as there are chances for values to be changed in-between. List<Double> values = new ArrayList<>(); for (Measurement measurement : measurements) { double value = measurement.getValue(); if (!Double.isFinite(value)) { continue; } names.add(measurement.getStatistic().getTagValueRepresentation()); values.add(value); } if (names.isEmpty()) { return Optional.empty(); } return Optional.of(writeDocument(meter, builder -> { for (int i = 0; i < names.size(); i++) { builder.append(",\"").append(names.get(i)).append("\":\"").append(values.get(i)).append("\""); } })); } }
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { Optional<String> writeMeter(Meter meter) { Iterable<Measurement> measurements = meter.measure(); List<String> names = new ArrayList<>(); // Snapshot values should be used throughout this method as there are chances for values to be changed in-between. List<Double> values = new ArrayList<>(); for (Measurement measurement : measurements) { double value = measurement.getValue(); if (!Double.isFinite(value)) { continue; } names.add(measurement.getStatistic().getTagValueRepresentation()); // depends on control dependency: [for], data = [measurement] values.add(value); // depends on control dependency: [for], data = [none] } if (names.isEmpty()) { return Optional.empty(); // depends on control dependency: [if], data = [none] } return Optional.of(writeDocument(meter, builder -> { for (int i = 0; i < names.size(); i++) { builder.append(",\"").append(names.get(i)).append("\":\"").append(values.get(i)).append("\""); } })); } }
/*
 * Generates the join-table entity class for a many-to-many relation.
 * Steps: (1) resolve the primary-key types of both related entities;
 * (2) bail out when the entity does not need generation; (3) derive the
 * foreign-key column/field names from the id-name prefix; (4) build three
 * SQLProperty descriptors (PK + two FKs with ON DELETE CASCADE);
 * (5) emit the JavaPoet class — constructor and getters when immutable,
 * public fields otherwise; (6) write the .java file (IOException is wrapped
 * in KriptonRuntimeException) and record a GeneratedTypeElement in
 * entityResult. NOTE(review): collapsed source formatting preserved
 * verbatim below.
 */
public class class_name { private void generateEntity(M2MEntity entity) { entity.propertyPrimaryKey = TypeName.LONG; entity.propertyKey1 = findPrimaryKeyFieldType(entity.entity1Name.toString()); entity.propertyKey2 = findPrimaryKeyFieldType(entity.entity2Name.toString()); if (!entity.needToCreate) { return; } String tableName = entity.tableName; String entityClassName = entity.name; AnnotationProcessorUtilis.infoOnGeneratedClasses(BindDaoMany2Many.class, entity.getPackageName(), entityClassName); Converter<String, String> converterFK = CaseFormat.LOWER_CAMEL.converterTo(CaseFormat.UPPER_CAMEL); Converter<String, String> converterFieldName = CaseFormat.UPPER_CAMEL.converterTo(CaseFormat.LOWER_CAMEL); Converter<String, String> converterField2ColumnName = CaseFormat.LOWER_CAMEL .converterTo(CaseFormat.LOWER_UNDERSCORE); String fkPrefix = converterFK.convert(entity.idName); String fk1Name = converterField2ColumnName.convert(entity.entity1Name.simpleName() + fkPrefix); String fk2Name = converterField2ColumnName.convert(entity.entity2Name.simpleName() + fkPrefix); String field1Name = converterFieldName.convert(entity.entity1Name.simpleName() + fkPrefix); String field2Name = converterFieldName.convert(entity.entity2Name.simpleName() + fkPrefix); List<SQLProperty> properties = new ArrayList<SQLProperty>(); // we define property type later { SQLProperty property = new SQLProperty(entity.idName, entity.getClassName(), entity.propertyPrimaryKey); property.columnType = ColumnType.PRIMARY_KEY; property.columnName = entity.idName; property.setNullable(false); property.setPrimaryKey(true); property.foreignParentClassName = null; properties.add(property); } { SQLProperty property = new SQLProperty(field1Name, entity.getClassName(), entity.propertyKey1); property.columnType = ColumnType.INDEXED; property.columnName = fk1Name; property.setNullable(false); property.setPrimaryKey(false); property.onDeleteAction = ForeignKeyAction.CASCADE; property.foreignParentClassName = 
entity.entity1Name.toString(); properties.add(property); } { SQLProperty property = new SQLProperty(field2Name, entity.getClassName(), entity.propertyKey2); property.columnType = ColumnType.INDEXED; property.columnName = fk2Name; property.setNullable(false); property.setPrimaryKey(false); property.onDeleteAction = ForeignKeyAction.CASCADE; property.foreignParentClassName = entity.entity2Name.toString(); properties.add(property); } // @formatter:off classBuilder = TypeSpec.classBuilder(entityClassName).addModifiers(Modifier.PUBLIC) .addAnnotation(AnnotationSpec.builder(BindSqlType.class).addMember("name", "$S", tableName).build()); // @formatter:on if (entity.immutable) { // create constructor Builder constructorBuilder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); for (SQLProperty p : properties) { constructorBuilder .addParameter(ParameterSpec.builder(p.getPropertyType().getTypeName(), p.getName()).build()); constructorBuilder.addStatement("this.$L=$L", p.getName(), p.getName()); } classBuilder.addMethod(constructorBuilder.build()); } Modifier fieldModifier = entity.immutable ? 
Modifier.PRIVATE : Modifier.PUBLIC; // javadoc for class classBuilder.addJavadoc("<p>"); classBuilder.addJavadoc("\nGenerated entity implementation for <code>$L</code>\n", entity.name); classBuilder.addJavadoc("</p>\n"); JavadocUtility.generateJavadocGeneratedBy(classBuilder); // classBuilder.addJavadoc(" @see $T\n", { // @formatter:off FieldSpec fieldSpec = FieldSpec.builder(entity.propertyPrimaryKey, entity.idName, fieldModifier) .addJavadoc("Primary key\n") .addAnnotation(AnnotationSpec.builder(BindSqlColumn.class) .addMember("columnType", "$T.$L", ColumnType.class, ColumnType.PRIMARY_KEY).build()) .build(); // @formatter:on classBuilder.addField(fieldSpec); } { // @formatter:off FieldSpec fieldSpec = FieldSpec .builder(entity.propertyKey1, field1Name, fieldModifier) .addJavadoc("Foreign key to $T model class\n", entity.entity1Name) .addAnnotation(AnnotationSpec.builder(BindSqlColumn.class) .addMember(AnnotationAttributeType.PARENT_ENTITY.getValue(), "$T.class", entity.entity1Name) .addMember(AnnotationAttributeType.ON_DELETE.getValue(), "$T.$L", ForeignKeyAction.class, ForeignKeyAction.CASCADE) .build()) .build(); // @formatter:on classBuilder.addField(fieldSpec); } { // @formatter:off FieldSpec fieldSpec = FieldSpec .builder(entity.propertyKey2, field2Name, fieldModifier) .addJavadoc("Foreign key to $T model class\n", entity.entity2Name) .addAnnotation(AnnotationSpec.builder(BindSqlColumn.class) .addMember(AnnotationAttributeType.PARENT_ENTITY.getValue(), "$T.class", entity.entity2Name) .addMember(AnnotationAttributeType.ON_DELETE.getValue(), "$T.$L", ForeignKeyAction.class, ForeignKeyAction.CASCADE) .build()) .build(); // @formatter:on classBuilder.addField(fieldSpec); } if (entity.immutable) { // getters for (SQLProperty p : properties) { Builder methodBuilder = MethodSpec .methodBuilder("get" + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, p.getName())) .returns(p.getPropertyType().getTypeName()).addStatement("return this.$L", p.getName()) 
.addModifiers(Modifier.PUBLIC); classBuilder.addMethod(methodBuilder.build()); } } TypeSpec typeSpec = classBuilder.build(); try { JavaWriterHelper.writeJava2File(filer, entity.getPackageName(), typeSpec); } catch (IOException e) { throw new KriptonRuntimeException(e); } GeneratedTypeElement entityElement = new GeneratedTypeElement(entity.getPackageName(), classBuilder.build(), tableName, fk1Name + ", " + fk2Name); entityElement.properties = properties; entityResult.add(entityElement); } }
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { private void generateEntity(M2MEntity entity) { entity.propertyPrimaryKey = TypeName.LONG; entity.propertyKey1 = findPrimaryKeyFieldType(entity.entity1Name.toString()); entity.propertyKey2 = findPrimaryKeyFieldType(entity.entity2Name.toString()); if (!entity.needToCreate) { return; // depends on control dependency: [if], data = [none] } String tableName = entity.tableName; String entityClassName = entity.name; AnnotationProcessorUtilis.infoOnGeneratedClasses(BindDaoMany2Many.class, entity.getPackageName(), entityClassName); Converter<String, String> converterFK = CaseFormat.LOWER_CAMEL.converterTo(CaseFormat.UPPER_CAMEL); Converter<String, String> converterFieldName = CaseFormat.UPPER_CAMEL.converterTo(CaseFormat.LOWER_CAMEL); Converter<String, String> converterField2ColumnName = CaseFormat.LOWER_CAMEL .converterTo(CaseFormat.LOWER_UNDERSCORE); String fkPrefix = converterFK.convert(entity.idName); String fk1Name = converterField2ColumnName.convert(entity.entity1Name.simpleName() + fkPrefix); String fk2Name = converterField2ColumnName.convert(entity.entity2Name.simpleName() + fkPrefix); String field1Name = converterFieldName.convert(entity.entity1Name.simpleName() + fkPrefix); String field2Name = converterFieldName.convert(entity.entity2Name.simpleName() + fkPrefix); List<SQLProperty> properties = new ArrayList<SQLProperty>(); // we define property type later { SQLProperty property = new SQLProperty(entity.idName, entity.getClassName(), entity.propertyPrimaryKey); property.columnType = ColumnType.PRIMARY_KEY; property.columnName = entity.idName; property.setNullable(false); property.setPrimaryKey(true); property.foreignParentClassName = null; properties.add(property); } { SQLProperty property = new SQLProperty(field1Name, entity.getClassName(), entity.propertyKey1); property.columnType = ColumnType.INDEXED; property.columnName = fk1Name; property.setNullable(false); property.setPrimaryKey(false); property.onDeleteAction = 
ForeignKeyAction.CASCADE; property.foreignParentClassName = entity.entity1Name.toString(); properties.add(property); } { SQLProperty property = new SQLProperty(field2Name, entity.getClassName(), entity.propertyKey2); property.columnType = ColumnType.INDEXED; property.columnName = fk2Name; property.setNullable(false); property.setPrimaryKey(false); property.onDeleteAction = ForeignKeyAction.CASCADE; property.foreignParentClassName = entity.entity2Name.toString(); properties.add(property); } // @formatter:off classBuilder = TypeSpec.classBuilder(entityClassName).addModifiers(Modifier.PUBLIC) .addAnnotation(AnnotationSpec.builder(BindSqlType.class).addMember("name", "$S", tableName).build()); // @formatter:on if (entity.immutable) { // create constructor Builder constructorBuilder = MethodSpec.constructorBuilder().addModifiers(Modifier.PUBLIC); for (SQLProperty p : properties) { constructorBuilder .addParameter(ParameterSpec.builder(p.getPropertyType().getTypeName(), p.getName()).build()); // depends on control dependency: [for], data = [none] constructorBuilder.addStatement("this.$L=$L", p.getName(), p.getName()); // depends on control dependency: [for], data = [p] } classBuilder.addMethod(constructorBuilder.build()); // depends on control dependency: [if], data = [none] } Modifier fieldModifier = entity.immutable ? 
Modifier.PRIVATE : Modifier.PUBLIC; // javadoc for class classBuilder.addJavadoc("<p>"); classBuilder.addJavadoc("\nGenerated entity implementation for <code>$L</code>\n", entity.name); classBuilder.addJavadoc("</p>\n"); JavadocUtility.generateJavadocGeneratedBy(classBuilder); // classBuilder.addJavadoc(" @see $T\n", { // @formatter:off FieldSpec fieldSpec = FieldSpec.builder(entity.propertyPrimaryKey, entity.idName, fieldModifier) .addJavadoc("Primary key\n") .addAnnotation(AnnotationSpec.builder(BindSqlColumn.class) .addMember("columnType", "$T.$L", ColumnType.class, ColumnType.PRIMARY_KEY).build()) .build(); // @formatter:on classBuilder.addField(fieldSpec); } { // @formatter:off FieldSpec fieldSpec = FieldSpec .builder(entity.propertyKey1, field1Name, fieldModifier) .addJavadoc("Foreign key to $T model class\n", entity.entity1Name) .addAnnotation(AnnotationSpec.builder(BindSqlColumn.class) .addMember(AnnotationAttributeType.PARENT_ENTITY.getValue(), "$T.class", entity.entity1Name) .addMember(AnnotationAttributeType.ON_DELETE.getValue(), "$T.$L", ForeignKeyAction.class, ForeignKeyAction.CASCADE) .build()) .build(); // @formatter:on classBuilder.addField(fieldSpec); } { // @formatter:off FieldSpec fieldSpec = FieldSpec .builder(entity.propertyKey2, field2Name, fieldModifier) .addJavadoc("Foreign key to $T model class\n", entity.entity2Name) .addAnnotation(AnnotationSpec.builder(BindSqlColumn.class) .addMember(AnnotationAttributeType.PARENT_ENTITY.getValue(), "$T.class", entity.entity2Name) .addMember(AnnotationAttributeType.ON_DELETE.getValue(), "$T.$L", ForeignKeyAction.class, ForeignKeyAction.CASCADE) .build()) .build(); // @formatter:on classBuilder.addField(fieldSpec); } if (entity.immutable) { // getters for (SQLProperty p : properties) { Builder methodBuilder = MethodSpec .methodBuilder("get" + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, p.getName())) .returns(p.getPropertyType().getTypeName()).addStatement("return this.$L", p.getName()) 
.addModifiers(Modifier.PUBLIC); classBuilder.addMethod(methodBuilder.build()); // depends on control dependency: [for], data = [none] } } TypeSpec typeSpec = classBuilder.build(); try { JavaWriterHelper.writeJava2File(filer, entity.getPackageName(), typeSpec); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new KriptonRuntimeException(e); } // depends on control dependency: [catch], data = [none] GeneratedTypeElement entityElement = new GeneratedTypeElement(entity.getPackageName(), classBuilder.build(), tableName, fk1Name + ", " + fk2Name); entityElement.properties = properties; entityResult.add(entityElement); } }
/*
 * Logs this timer's accumulated statistics at the given priority. The
 * "labels" and "values" strings are built in lock-step so column order
 * always matches: CPU total, Wall total, CPU avg, Wall avg, then per-thread
 * figures when threadCount > 1. CPU figures are only emitted when
 * threadId != 0 (per-thread timers). Values are divided by 1,000,000 for
 * the "(ms)" output — internal unit presumably nanoseconds (TODO confirm).
 * Warns if the timer is still running while being logged.
 */
public class class_name { public void log(Logger logger, LogLevel priority) { if (LoggerWrap.isEnabledFor(logger, priority)) { String timerLabel; if (threadId != 0) { timerLabel = name + " (thread " + threadId + ")"; } else if (threadCount > 1) { timerLabel = name + " (over " + threadCount + " threads)"; } else { timerLabel = name; } if (todoFlags == RECORD_NONE) { LoggerWrap.log(logger, priority, "Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); } else { String labels = ""; String values = ""; String separator; if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels += "CPU"; values += totalCpuTime / 1000000; separator = "/"; } else { separator = ""; } if ((todoFlags & RECORD_WALLTIME) != 0) { labels += separator + "Wall"; values += separator + totalWallTime / 1000000; } if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels += "/CPU avg"; values += "/" + (float) (totalCpuTime) / measurements / 1000000; } if ((todoFlags & RECORD_WALLTIME) != 0) { labels += "/Wall avg"; values += "/" + (float) (totalWallTime) / measurements / 1000000; } if (threadCount > 1) { if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels += "/CPU per thread"; values += "/" + (float) (totalCpuTime) / threadCount / 1000000; } if ((todoFlags & RECORD_WALLTIME) != 0) { labels += "/Wall per thread"; values += "/" + (float) (totalWallTime) / threadCount / 1000000; } } LoggerWrap.log(logger, priority, "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); } if (isRunning) { logger.warn("Timer " + timerLabel + " logged while it was still running"); } } } }
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { public void log(Logger logger, LogLevel priority) { if (LoggerWrap.isEnabledFor(logger, priority)) { String timerLabel; if (threadId != 0) { timerLabel = name + " (thread " + threadId + ")"; // depends on control dependency: [if], data = [none] } else if (threadCount > 1) { timerLabel = name + " (over " + threadCount + " threads)"; // depends on control dependency: [if], data = [none] } else { timerLabel = name; // depends on control dependency: [if], data = [none] } if (todoFlags == RECORD_NONE) { LoggerWrap.log(logger, priority, "Timer " + timerLabel + " recorded " + measurements + " run(s), no times taken"); // depends on control dependency: [if], data = [none] } else { String labels = ""; String values = ""; String separator; if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels += "CPU"; // depends on control dependency: [if], data = [none] values += totalCpuTime / 1000000; // depends on control dependency: [if], data = [none] separator = "/"; // depends on control dependency: [if], data = [none] } else { separator = ""; // depends on control dependency: [if], data = [none] } if ((todoFlags & RECORD_WALLTIME) != 0) { labels += separator + "Wall"; // depends on control dependency: [if], data = [none] values += separator + totalWallTime / 1000000; // depends on control dependency: [if], data = [none] } if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels += "/CPU avg"; // depends on control dependency: [if], data = [none] values += "/" + (float) (totalCpuTime) / measurements / 1000000; // depends on control dependency: [if], data = [none] } if ((todoFlags & RECORD_WALLTIME) != 0) { labels += "/Wall avg"; // depends on control dependency: [if], data = [none] values += "/" + (float) (totalWallTime) / measurements / 1000000; // depends on control dependency: [if], data = [none] } if (threadCount > 1) { if ((todoFlags & RECORD_CPUTIME) != 0 && threadId != 0) { labels += "/CPU per thread"; // depends on control 
dependency: [if], data = [none] values += "/" + (float) (totalCpuTime) / threadCount / 1000000; // depends on control dependency: [if], data = [none] } if ((todoFlags & RECORD_WALLTIME) != 0) { labels += "/Wall per thread"; // depends on control dependency: [if], data = [none] values += "/" + (float) (totalWallTime) / threadCount / 1000000; // depends on control dependency: [if], data = [none] } } LoggerWrap.log(logger, priority, "Time for " + timerLabel + " for " + measurements + " run(s) " + labels + " (ms): " + values); // depends on control dependency: [if], data = [none] } if (isRunning) { logger.warn("Timer " + timerLabel + " logged while it was still running"); // depends on control dependency: [if], data = [none] } } } }
public class class_name {
    /**
     * Lazily resolves the {@code IfcWindowStyleConstructionEnum} EEnum from
     * the registered IFC4 package (classifier index 1104) and caches it,
     * matching the lazy-lookup pattern of this package's other accessors.
     */
    @Override
    public EEnum getIfcWindowStyleConstructionEnum() {
        if (ifcWindowStyleConstructionEnumEEnum != null) {
            return ifcWindowStyleConstructionEnumEEnum;
        }
        ifcWindowStyleConstructionEnumEEnum = (EEnum) EPackage.Registry.INSTANCE
                .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1104);
        return ifcWindowStyleConstructionEnumEEnum;
    }
}
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { @Override public EEnum getIfcWindowStyleConstructionEnum() { if (ifcWindowStyleConstructionEnumEEnum == null) { ifcWindowStyleConstructionEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(1104); // depends on control dependency: [if], data = [none] } return ifcWindowStyleConstructionEnumEEnum; } }
/*
 * Attempts to load a native library found at `path` on the classpath.
 * file: URLs are loaded in place; jar: URLs are unpacked to a temp file
 * first (a JVM cannot load a shared library from inside a jar), loaded,
 * then deleted again. Returns true once one candidate loads; an
 * IOException while enumerating resources yields false.
 * NOTE(review): UnsatisfiedLinkError is deliberately swallowed so the
 * next classpath candidate can be tried.
 */
public class class_name { private boolean unpackLibrary(String path) { boolean retval = false; try { final Enumeration<URL> c = JNILibrary.class.getClassLoader() .getResources(path); while (c.hasMoreElements()) { final URL url = c.nextElement(); log.trace("path: {}; url: {}", path, url); if (url == null) return false; boolean unpacked = false; File lib; if (url.getProtocol().toLowerCase().equals("file")) { // it SHOULD already exist on the disk. let's look for it. try { lib = new File(new URI(url.toString())); } catch (URISyntaxException e) { lib = new File(url.getPath()); } if (!lib.exists()) { log.error("Unpacked library not unpacked correctedly; url: {}", url); continue; } } else if (url.getProtocol().toLowerCase().equals("jar")){ // sucktastic -- we cannot in a JVM load a shared library // directly from a JAR, so we need to unpack to a temp // directory and load from there. InputStream stream = url.openStream(); if (stream == null) { log.error("could not get stream for resource: {}", url.getPath()); continue; } FileOutputStream out = null; try { File dir = getTmpDir(); // did you know windows REQUIRES .dll. Sigh. lib = File .createTempFile( "humble", JNIEnv.getEnv().getOSFamily() == JNIEnv.OSFamily.WINDOWS ? ".dll" : null, dir); lib.deleteOnExit(); out = new FileOutputStream(lib); int bytesRead = 0; final byte[] buffer = new byte[2048]; while ((bytesRead = stream.read(buffer, 0, buffer.length)) > 0) { out.write(buffer, 0, bytesRead); } unpacked = true; } catch (IOException e) { log.error("could not create temp file: {}", e); continue; } finally { try { stream.close(); } catch (IOException e) { } if (out != null) try { out.close(); } catch (IOException e) { } } try { doJNILoad(lib.getAbsolutePath()); retval = true; break; } catch (UnsatisfiedLinkError e) { // expected in some cases, try the next case. } finally { if (unpacked) { // Well let's try to clean up after ourselves since // we had ot unpack. 
deleteUnpackedFile(lib.getAbsolutePath()); } } } } } catch (IOException e1) { retval = false; } return retval; } }
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { private boolean unpackLibrary(String path) { boolean retval = false; try { final Enumeration<URL> c = JNILibrary.class.getClassLoader() .getResources(path); while (c.hasMoreElements()) { final URL url = c.nextElement(); log.trace("path: {}; url: {}", path, url); // depends on control dependency: [while], data = [none] if (url == null) return false; boolean unpacked = false; File lib; if (url.getProtocol().toLowerCase().equals("file")) { // it SHOULD already exist on the disk. let's look for it. try { lib = new File(new URI(url.toString())); // depends on control dependency: [try], data = [none] } catch (URISyntaxException e) { lib = new File(url.getPath()); } // depends on control dependency: [catch], data = [none] if (!lib.exists()) { log.error("Unpacked library not unpacked correctedly; url: {}", // depends on control dependency: [if], data = [none] url); // depends on control dependency: [if], data = [none] continue; } } else if (url.getProtocol().toLowerCase().equals("jar")){ // sucktastic -- we cannot in a JVM load a shared library // directly from a JAR, so we need to unpack to a temp // directory and load from there. InputStream stream = url.openStream(); if (stream == null) { log.error("could not get stream for resource: {}", url.getPath()); // depends on control dependency: [if], data = [none] continue; } FileOutputStream out = null; try { File dir = getTmpDir(); // did you know windows REQUIRES .dll. Sigh. lib = File .createTempFile( "humble", JNIEnv.getEnv().getOSFamily() == JNIEnv.OSFamily.WINDOWS ? 
".dll" : null, dir); // depends on control dependency: [try], data = [none] lib.deleteOnExit(); // depends on control dependency: [try], data = [none] out = new FileOutputStream(lib); // depends on control dependency: [try], data = [none] int bytesRead = 0; final byte[] buffer = new byte[2048]; while ((bytesRead = stream.read(buffer, 0, buffer.length)) > 0) { out.write(buffer, 0, bytesRead); // depends on control dependency: [while], data = [none] } unpacked = true; // depends on control dependency: [try], data = [none] } catch (IOException e) { log.error("could not create temp file: {}", e); continue; } finally { // depends on control dependency: [catch], data = [none] try { stream.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { } // depends on control dependency: [catch], data = [none] if (out != null) try { out.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { } // depends on control dependency: [catch], data = [none] } try { doJNILoad(lib.getAbsolutePath()); // depends on control dependency: [try], data = [none] retval = true; // depends on control dependency: [try], data = [none] break; } catch (UnsatisfiedLinkError e) { // expected in some cases, try the next case. } finally { // depends on control dependency: [catch], data = [none] if (unpacked) { // Well let's try to clean up after ourselves since // we had ot unpack. deleteUnpackedFile(lib.getAbsolutePath()); // depends on control dependency: [if], data = [none] } } } } } catch (IOException e1) { retval = false; } // depends on control dependency: [catch], data = [none] return retval; } }
public class class_name { protected void removeNeighborFromRoutingTable(final TrustGraphNodeId neighbor) { /* find the neighbor that the node being removed is mapped to * ie the route neighbor -> mergeVal */ final TrustGraphNodeId mergeVal = routingTable.get(neighbor); // if it is mapped to itself, just remove it and return. if (mergeVal.equals(neighbor)) { routingTable.remove(neighbor); // this should only happen when there was only a single entry. if (!routingTable.isEmpty()) { assert false;//routingTable.isEmpty(); } return; } /* If it wasn't mapped to itself, find the neighbor that is * currently mapped to the neighbor being removed, ie the * route mergeKey -> neighbor. */ TrustGraphNodeId mergeKey = null; for (Map.Entry<TrustGraphNodeId,TrustGraphNodeId> e : routingTable.entrySet()) { if (e.getValue().equals(neighbor)) { mergeKey = e.getKey(); break; } } assert mergeKey != null; /** * Atomically merge the route X->neighbor, neighbor->Y into the * route X->Y.This preserves the ability to route to Y. Finally, * remove the mapping neighbor->Y. */ routingTable.replace(mergeKey, mergeVal); routingTable.remove(neighbor); } }
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { protected void removeNeighborFromRoutingTable(final TrustGraphNodeId neighbor) { /* find the neighbor that the node being removed is mapped to * ie the route neighbor -> mergeVal */ final TrustGraphNodeId mergeVal = routingTable.get(neighbor); // if it is mapped to itself, just remove it and return. if (mergeVal.equals(neighbor)) { routingTable.remove(neighbor); // depends on control dependency: [if], data = [none] // this should only happen when there was only a single entry. if (!routingTable.isEmpty()) { assert false;//routingTable.isEmpty(); } return; // depends on control dependency: [if], data = [none] } /* If it wasn't mapped to itself, find the neighbor that is * currently mapped to the neighbor being removed, ie the * route mergeKey -> neighbor. */ TrustGraphNodeId mergeKey = null; for (Map.Entry<TrustGraphNodeId,TrustGraphNodeId> e : routingTable.entrySet()) { if (e.getValue().equals(neighbor)) { mergeKey = e.getKey(); // depends on control dependency: [if], data = [none] break; } } assert mergeKey != null; /** * Atomically merge the route X->neighbor, neighbor->Y into the * route X->Y.This preserves the ability to route to Y. Finally, * remove the mapping neighbor->Y. */ routingTable.replace(mergeKey, mergeVal); routingTable.remove(neighbor); } }
/*
 * Collects events at or after `pos` into `list`, searching in order: the
 * current batch (_batch), the last sealed batch (_lastBatch), then older
 * batches from the retention queue, deserialized on demand from _store.
 * Returns the position advanced past the collected events (clock taken
 * from the last collected event when any were found), or null when `pos`
 * is out of retention, in indexed form, or nothing matched.
 * NOTE(review): deserialization failures are logged and that batch skipped.
 */
public class class_name { @Override public Position get(Position pos, List<Event<T>> list) { EventBatch<T> b; // Return null if the position is out of retention or in the indexed form. if(pos.getOffset() < getOrigin() || pos.isIndexed()) { return null; } // Get events from _batch b = _batch; if(b.getOrigin() <= pos.getOffset()) { long newOffset = b.get(pos.getOffset(), list); Clock clock = pos.getOffset() < newOffset ? b.getClock(newOffset - 1) : pos.getClock(); return new SimplePosition(getId(), newOffset, clock); } // Get events from _lastBatch b = _lastBatch; if(b != null && b.getOrigin() <= pos.getOffset()) { long newOffset = b.get(pos.getOffset(), list); Clock clock = pos.getOffset() < newOffset ? b.getClock(newOffset - 1) : pos.getClock(); return new SimplePosition(getId(), newOffset, clock); } // Get events from batches in retention int cnt = 0; Iterator<EventBatchCursor> iter = _retentionQueue.iterator(); while(iter.hasNext()) { EventBatchCursor c = iter.next(); if(c.getHeader().getOrigin() <= pos.getOffset()) { byte[] dat = _store.get(c.getLookup()); try { b = _eventBatchSerializer.deserialize(dat); long newOffset = b.get(pos.getOffset(), list); if(pos.getOffset() < newOffset) { Clock clock = b.getClock(newOffset - 1); return new SimplePosition(getId(), newOffset, clock); } } catch(Exception e) { _logger.warn("Ignored EventBatch: " + c.getHeader().getOrigin()); } } else { // early stop if(cnt == 0) { break; } } cnt++; } return null; } }
/* NOTE(review): control/data-dependency-annotated dataset copy of the preceding method; the inline "// depends on ..." markers are analysis labels, left untouched. */
public class class_name { @Override public Position get(Position pos, List<Event<T>> list) { EventBatch<T> b; // Return null if the position is out of retention or in the indexed form. if(pos.getOffset() < getOrigin() || pos.isIndexed()) { return null; // depends on control dependency: [if], data = [none] } // Get events from _batch b = _batch; if(b.getOrigin() <= pos.getOffset()) { long newOffset = b.get(pos.getOffset(), list); Clock clock = pos.getOffset() < newOffset ? b.getClock(newOffset - 1) : pos.getClock(); return new SimplePosition(getId(), newOffset, clock); // depends on control dependency: [if], data = [none] } // Get events from _lastBatch b = _lastBatch; if(b != null && b.getOrigin() <= pos.getOffset()) { long newOffset = b.get(pos.getOffset(), list); Clock clock = pos.getOffset() < newOffset ? b.getClock(newOffset - 1) : pos.getClock(); return new SimplePosition(getId(), newOffset, clock); // depends on control dependency: [if], data = [none] } // Get events from batches in retention int cnt = 0; Iterator<EventBatchCursor> iter = _retentionQueue.iterator(); while(iter.hasNext()) { EventBatchCursor c = iter.next(); if(c.getHeader().getOrigin() <= pos.getOffset()) { byte[] dat = _store.get(c.getLookup()); try { b = _eventBatchSerializer.deserialize(dat); // depends on control dependency: [try], data = [none] long newOffset = b.get(pos.getOffset(), list); if(pos.getOffset() < newOffset) { Clock clock = b.getClock(newOffset - 1); return new SimplePosition(getId(), newOffset, clock); // depends on control dependency: [if], data = [none] } } catch(Exception e) { _logger.warn("Ignored EventBatch: " + c.getHeader().getOrigin()); } // depends on control dependency: [catch], data = [none] } else { // early stop if(cnt == 0) { break; } } cnt++; // depends on control dependency: [while], data = [none] } return null; } }
public class class_name { bbBlock truncate(Object caller, int version, int pos) { assert mutation_in_progress(caller, version); if (0 > pos || pos > this._buf_limit ) throw new IllegalArgumentException(); // clear out all the blocks in use from the last in use // to the block where the eof will be located bbBlock b = null; for (int idx = this._next_block_position - 1; idx >= 0; idx--) { b = this._blocks.get(idx); if (b._offset <= pos) break; b.clearBlock(); } if (b == null) { throw new IllegalStateException("block missing at position "+pos); } // reset the next block position to account for this. this._next_block_position = b._idx + 1; // on the block where eof is, set it's limit appropriately b._limit = pos - b._offset; // set the overall buffer limits this._buf_limit = pos; b = this.findBlockForRead(pos, version, b, pos); return b; } }
public class class_name { bbBlock truncate(Object caller, int version, int pos) { assert mutation_in_progress(caller, version); if (0 > pos || pos > this._buf_limit ) throw new IllegalArgumentException(); // clear out all the blocks in use from the last in use // to the block where the eof will be located bbBlock b = null; for (int idx = this._next_block_position - 1; idx >= 0; idx--) { b = this._blocks.get(idx); // depends on control dependency: [for], data = [idx] if (b._offset <= pos) break; b.clearBlock(); // depends on control dependency: [for], data = [none] } if (b == null) { throw new IllegalStateException("block missing at position "+pos); } // reset the next block position to account for this. this._next_block_position = b._idx + 1; // on the block where eof is, set it's limit appropriately b._limit = pos - b._offset; // set the overall buffer limits this._buf_limit = pos; b = this.findBlockForRead(pos, version, b, pos); return b; } }
public class class_name { public void setStyle(String style) { if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_style)) { m_component.removeStyleName(m_style); } m_style = style; if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_style)) { m_component.addStyleName(m_style); } } }
public class class_name { public void setStyle(String style) { if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_style)) { m_component.removeStyleName(m_style); // depends on control dependency: [if], data = [none] } m_style = style; if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(m_style)) { m_component.addStyleName(m_style); // depends on control dependency: [if], data = [none] } } }
public class class_name { public List<Concept> getPathToRoot() { LinkedList<Concept> path = new LinkedList<>(); Concept node = this; while (node != null) { path.add(node); node = node.parent; } return path; } }
public class class_name { public List<Concept> getPathToRoot() { LinkedList<Concept> path = new LinkedList<>(); Concept node = this; while (node != null) { path.add(node); // depends on control dependency: [while], data = [(node] node = node.parent; // depends on control dependency: [while], data = [none] } return path; } }
public class class_name { public Map<K, List<Versioned<V>>> getAllWithCustomTimeout(CompositeVoldemortRequest<K, V> requestWrapper) { validateTimeout(requestWrapper.getRoutingTimeoutInMs()); Map<K, List<Versioned<V>>> items = null; for(int attempts = 0;; attempts++) { if(attempts >= this.metadataRefreshAttempts) throw new VoldemortException(this.metadataRefreshAttempts + " metadata refresh attempts failed."); try { String KeysHexString = ""; long startTimeInMs = System.currentTimeMillis(); if(logger.isDebugEnabled()) { Iterable<ByteArray> keys = (Iterable<ByteArray>) requestWrapper.getIterableKeys(); KeysHexString = getKeysHexString(keys); debugLogStart("GET_ALL", requestWrapper.getRequestOriginTimeInMs(), startTimeInMs, KeysHexString); } items = store.getAll(requestWrapper); if(logger.isDebugEnabled()) { int vcEntrySize = 0; for(List<Versioned<V>> item: items.values()) { for(Versioned<V> vc: item) { vcEntrySize += ((VectorClock) vc.getVersion()).getVersionMap().size(); } } debugLogEnd("GET_ALL", requestWrapper.getRequestOriginTimeInMs(), startTimeInMs, System.currentTimeMillis(), KeysHexString, vcEntrySize); } return items; } catch(InvalidMetadataException e) { logger.info("Received invalid metadata exception during getAll [ " + e.getMessage() + " ] on store '" + storeName + "'. Rebootstrapping"); bootStrap(); } } } }
public class class_name { public Map<K, List<Versioned<V>>> getAllWithCustomTimeout(CompositeVoldemortRequest<K, V> requestWrapper) { validateTimeout(requestWrapper.getRoutingTimeoutInMs()); Map<K, List<Versioned<V>>> items = null; for(int attempts = 0;; attempts++) { if(attempts >= this.metadataRefreshAttempts) throw new VoldemortException(this.metadataRefreshAttempts + " metadata refresh attempts failed."); try { String KeysHexString = ""; long startTimeInMs = System.currentTimeMillis(); if(logger.isDebugEnabled()) { Iterable<ByteArray> keys = (Iterable<ByteArray>) requestWrapper.getIterableKeys(); KeysHexString = getKeysHexString(keys); // depends on control dependency: [if], data = [none] debugLogStart("GET_ALL", requestWrapper.getRequestOriginTimeInMs(), startTimeInMs, KeysHexString); // depends on control dependency: [if], data = [none] } items = store.getAll(requestWrapper); // depends on control dependency: [try], data = [none] if(logger.isDebugEnabled()) { int vcEntrySize = 0; for(List<Versioned<V>> item: items.values()) { for(Versioned<V> vc: item) { vcEntrySize += ((VectorClock) vc.getVersion()).getVersionMap().size(); // depends on control dependency: [for], data = [vc] } } debugLogEnd("GET_ALL", requestWrapper.getRequestOriginTimeInMs(), startTimeInMs, System.currentTimeMillis(), KeysHexString, vcEntrySize); // depends on control dependency: [if], data = [none] } return items; // depends on control dependency: [try], data = [none] } catch(InvalidMetadataException e) { logger.info("Received invalid metadata exception during getAll [ " + e.getMessage() + " ] on store '" + storeName + "'. Rebootstrapping"); bootStrap(); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public static void write( DataOutputStream out, Map<String, String> importedAnnotations, Map<String, Set<String>> packageAnnotations, Map<String, Set<String>> typeAnnotations, Map<String, MethodAnnotationsRecord> methodRecords) throws IOException { // File format version/magic number out.writeInt(VERSION_0_FILE_MAGIC_NUMBER); // Followed by the number of string dictionary entries int numStringEntires = 0; Map<String, Integer> encodingDictionary = new LinkedHashMap<>(); List<String> strings = new ArrayList<String>(); List<Collection<String>> keysets = ImmutableList.of( importedAnnotations.values(), packageAnnotations.keySet(), typeAnnotations.keySet(), methodRecords.keySet()); for (Collection<String> keyset : keysets) { for (String key : keyset) { assert !encodingDictionary.containsKey(key); strings.add(key); encodingDictionary.put(key, numStringEntires); ++numStringEntires; } } out.writeInt(numStringEntires); // Followed by the entries themselves for (String s : strings) { out.writeUTF(s); } // Followed by the number of encoded package annotation records int packageAnnotationSize = 0; for (Map.Entry<String, Set<String>> entry : packageAnnotations.entrySet()) { packageAnnotationSize += entry.getValue().size(); } out.writeInt(packageAnnotationSize); // Followed by those records as pairs of ints pointing into the dictionary for (Map.Entry<String, Set<String>> entry : packageAnnotations.entrySet()) { for (String annot : entry.getValue()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } // Followed by the number of encoded type annotation records int typeAnnotationSize = 0; for (Map.Entry<String, Set<String>> entry : typeAnnotations.entrySet()) { typeAnnotationSize += entry.getValue().size(); } out.writeInt(typeAnnotationSize); // Followed by those records as pairs of ints pointing into the dictionary for (Map.Entry<String, Set<String>> entry : 
typeAnnotations.entrySet()) { for (String annot : entry.getValue()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } // Followed by the number of encoded method return/declaration annotation records int methodAnnotationSize = 0; int methodArgumentRecordsSize = 0; for (Map.Entry<String, MethodAnnotationsRecord> entry : methodRecords.entrySet()) { methodAnnotationSize += entry.getValue().getMethodAnnotations().size(); methodArgumentRecordsSize += entry.getValue().getArgumentAnnotations().size(); } out.writeInt(methodAnnotationSize); // Followed by those records as pairs of ints pointing into the dictionary for (Map.Entry<String, MethodAnnotationsRecord> entry : methodRecords.entrySet()) { for (String annot : entry.getValue().getMethodAnnotations()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } // Followed by the number of encoded method argument annotation records out.writeInt(methodArgumentRecordsSize); // Followed by those records as a triplet of ints ( 0 and 2 point in the dictionary, 1 is the // argument position) for (Map.Entry<String, MethodAnnotationsRecord> entry : methodRecords.entrySet()) { for (Map.Entry<Integer, ImmutableSet<String>> argEntry : entry.getValue().getArgumentAnnotations().entrySet()) { for (String annot : argEntry.getValue()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(argEntry.getKey()); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } } } }
public class class_name { public static void write( DataOutputStream out, Map<String, String> importedAnnotations, Map<String, Set<String>> packageAnnotations, Map<String, Set<String>> typeAnnotations, Map<String, MethodAnnotationsRecord> methodRecords) throws IOException { // File format version/magic number out.writeInt(VERSION_0_FILE_MAGIC_NUMBER); // Followed by the number of string dictionary entries int numStringEntires = 0; Map<String, Integer> encodingDictionary = new LinkedHashMap<>(); List<String> strings = new ArrayList<String>(); List<Collection<String>> keysets = ImmutableList.of( importedAnnotations.values(), packageAnnotations.keySet(), typeAnnotations.keySet(), methodRecords.keySet()); for (Collection<String> keyset : keysets) { for (String key : keyset) { assert !encodingDictionary.containsKey(key); // depends on control dependency: [for], data = [key] strings.add(key); // depends on control dependency: [for], data = [key] encodingDictionary.put(key, numStringEntires); // depends on control dependency: [for], data = [key] ++numStringEntires; // depends on control dependency: [for], data = [none] } } out.writeInt(numStringEntires); // Followed by the entries themselves for (String s : strings) { out.writeUTF(s); } // Followed by the number of encoded package annotation records int packageAnnotationSize = 0; for (Map.Entry<String, Set<String>> entry : packageAnnotations.entrySet()) { packageAnnotationSize += entry.getValue().size(); } out.writeInt(packageAnnotationSize); // Followed by those records as pairs of ints pointing into the dictionary for (Map.Entry<String, Set<String>> entry : packageAnnotations.entrySet()) { for (String annot : entry.getValue()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } // Followed by the number of encoded type annotation records int typeAnnotationSize = 0; for (Map.Entry<String, Set<String>> entry : typeAnnotations.entrySet()) { 
typeAnnotationSize += entry.getValue().size(); } out.writeInt(typeAnnotationSize); // Followed by those records as pairs of ints pointing into the dictionary for (Map.Entry<String, Set<String>> entry : typeAnnotations.entrySet()) { for (String annot : entry.getValue()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } // Followed by the number of encoded method return/declaration annotation records int methodAnnotationSize = 0; int methodArgumentRecordsSize = 0; for (Map.Entry<String, MethodAnnotationsRecord> entry : methodRecords.entrySet()) { methodAnnotationSize += entry.getValue().getMethodAnnotations().size(); methodArgumentRecordsSize += entry.getValue().getArgumentAnnotations().size(); } out.writeInt(methodAnnotationSize); // Followed by those records as pairs of ints pointing into the dictionary for (Map.Entry<String, MethodAnnotationsRecord> entry : methodRecords.entrySet()) { for (String annot : entry.getValue().getMethodAnnotations()) { out.writeInt(encodingDictionary.get(entry.getKey())); out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); } } // Followed by the number of encoded method argument annotation records out.writeInt(methodArgumentRecordsSize); // Followed by those records as a triplet of ints ( 0 and 2 point in the dictionary, 1 is the // argument position) for (Map.Entry<String, MethodAnnotationsRecord> entry : methodRecords.entrySet()) { for (Map.Entry<Integer, ImmutableSet<String>> argEntry : entry.getValue().getArgumentAnnotations().entrySet()) { for (String annot : argEntry.getValue()) { out.writeInt(encodingDictionary.get(entry.getKey())); // depends on control dependency: [for], data = [none] out.writeInt(argEntry.getKey()); // depends on control dependency: [for], data = [none] out.writeInt(encodingDictionary.get(importedAnnotations.get(annot))); // depends on control dependency: [for], data = [annot] } } } } }
public class class_name { private LazyFileBasedLongCollection getCollectionForKey(final K key) { LazyFileBasedLongCollection collection = hashFiles.get(key); if (collection == null) { collection = new LazyFileBasedLongCollection(basePath + File.separator + key.hashCode() + ".lfc", clearOnOpen); hashFiles.put(key, collection); } return collection; } }
public class class_name { private LazyFileBasedLongCollection getCollectionForKey(final K key) { LazyFileBasedLongCollection collection = hashFiles.get(key); if (collection == null) { collection = new LazyFileBasedLongCollection(basePath + File.separator + key.hashCode() + ".lfc", clearOnOpen); // depends on control dependency: [if], data = [none] hashFiles.put(key, collection); // depends on control dependency: [if], data = [none] } return collection; } }
public class class_name { private void setResourceIdOnFaceletsMode(FacesContext facesContext, UIComponent component, Class<?> inspectedClass) { if (component.getId() == null) { FaceletCompositionContext mctx = FaceletCompositionContext.getCurrentInstance(facesContext); if (mctx != null) { UIViewRoot root = facesContext.getViewRoot(); root.getAttributes().put(RESOURCE_DEPENDENCY_UNIQUE_ID_KEY, Boolean.TRUE); try { String uid = root.createUniqueId(facesContext, null); component.setId(uid); } finally { root.getAttributes().put(RESOURCE_DEPENDENCY_UNIQUE_ID_KEY, Boolean.FALSE); } if (!mctx.isUsingPSSOnThisView()) { // Now set the identifier that will help to know which classes has been already inspected. component.getAttributes().put( RequestViewContext.RESOURCE_DEPENDENCY_INSPECTED_CLASS, inspectedClass); } else if (mctx.isRefreshTransientBuildOnPSSPreserveState()) { component.getAttributes().put( RequestViewContext.RESOURCE_DEPENDENCY_INSPECTED_CLASS, inspectedClass); } } else { // This happens when there is a programmatic addition, which means the user has added the // components to the tree on render response phase or earlier but outside facelets control. // In that case we need to save the dependency. component.getAttributes().put( RequestViewContext.RESOURCE_DEPENDENCY_INSPECTED_CLASS, inspectedClass); } } } }
public class class_name { private void setResourceIdOnFaceletsMode(FacesContext facesContext, UIComponent component, Class<?> inspectedClass) { if (component.getId() == null) { FaceletCompositionContext mctx = FaceletCompositionContext.getCurrentInstance(facesContext); if (mctx != null) { UIViewRoot root = facesContext.getViewRoot(); root.getAttributes().put(RESOURCE_DEPENDENCY_UNIQUE_ID_KEY, Boolean.TRUE); // depends on control dependency: [if], data = [none] try { String uid = root.createUniqueId(facesContext, null); component.setId(uid); // depends on control dependency: [try], data = [none] } finally { root.getAttributes().put(RESOURCE_DEPENDENCY_UNIQUE_ID_KEY, Boolean.FALSE); } if (!mctx.isUsingPSSOnThisView()) { // Now set the identifier that will help to know which classes has been already inspected. component.getAttributes().put( RequestViewContext.RESOURCE_DEPENDENCY_INSPECTED_CLASS, inspectedClass); // depends on control dependency: [if], data = [none] } else if (mctx.isRefreshTransientBuildOnPSSPreserveState()) { component.getAttributes().put( RequestViewContext.RESOURCE_DEPENDENCY_INSPECTED_CLASS, inspectedClass); // depends on control dependency: [if], data = [none] } } else { // This happens when there is a programmatic addition, which means the user has added the // components to the tree on render response phase or earlier but outside facelets control. // In that case we need to save the dependency. component.getAttributes().put( RequestViewContext.RESOURCE_DEPENDENCY_INSPECTED_CLASS, inspectedClass); // depends on control dependency: [if], data = [none] } } } }
public class class_name { protected final void setModelResources(final URI... modelResources) { if (modelResources == null) { this.modelResources = null; } else { this.modelResources = new ArrayList<URI>(); this.modelResources.addAll(Arrays.asList(modelResources)); } } }
public class class_name { protected final void setModelResources(final URI... modelResources) { if (modelResources == null) { this.modelResources = null; // depends on control dependency: [if], data = [none] } else { this.modelResources = new ArrayList<URI>(); // depends on control dependency: [if], data = [none] this.modelResources.addAll(Arrays.asList(modelResources)); // depends on control dependency: [if], data = [(modelResources] } } }
public class class_name { public boolean createEntity(Dao dao, Entity<?> en) { StringBuilder sb = new StringBuilder("CREATE TABLE " + en.getTableName() + "("); // 创建字段 for (MappingField mf : en.getMappingFields()) { if (mf.isReadonly()) continue; sb.append('\n').append(mf.getColumnNameInSql()); sb.append(' ').append(evalFieldType(mf)); // 非主键的 @Name,应该加入唯一性约束 if (mf.isName() && en.getPkType() != PkType.NAME) { sb.append(" UNIQUE NOT NULL"); } // 普通字段 else { if (mf.isNotNull() || mf.isPk()) sb.append(" NOT NULL"); if (mf.hasDefaultValue()) { addDefaultValue(sb, mf); } if (mf.isAutoIncreasement()) sb.append(" generated by default as identity "); if (mf.isPk() && en.getPks().size() == 1) { sb.append(" primary key "); } } sb.append(','); } // 结束表字段设置 sb.setCharAt(sb.length() - 1, ')'); //指定表空间 if(en.hasMeta(META_TABLESPACE)){ sb.append(String.format(" IN %s", en.getMeta(META_TABLESPACE)) ); } // 执行创建语句 dao.execute(Sqls.create(sb.toString())); // 创建联合主键 if (en.getPks().size() > 1) { sb = new StringBuilder(); sb.append("ALTER TABLE ").append(en.getTableName()).append(" ADD CONSTRAINT PK_"); sb.append(makePksName(en)); sb.append(" PRIMARY KEY ("); for (MappingField mf : en.getPks()) { sb.append(mf.getColumnNameInSql()).append(","); } sb.setCharAt(sb.length() - 1, ')'); dao.execute(Sqls.create(sb.toString())); } // 创建关联表 createRelation(dao, en); // 创建索引 dao.execute(createIndexs(en).toArray(new Sql[0])); // 添加注释(表注释与字段注释) addComment(dao, en); return true; } }
public class class_name { public boolean createEntity(Dao dao, Entity<?> en) { StringBuilder sb = new StringBuilder("CREATE TABLE " + en.getTableName() + "("); // 创建字段 for (MappingField mf : en.getMappingFields()) { if (mf.isReadonly()) continue; sb.append('\n').append(mf.getColumnNameInSql()); // depends on control dependency: [for], data = [mf] sb.append(' ').append(evalFieldType(mf)); // depends on control dependency: [for], data = [mf] // 非主键的 @Name,应该加入唯一性约束 if (mf.isName() && en.getPkType() != PkType.NAME) { sb.append(" UNIQUE NOT NULL"); // depends on control dependency: [if], data = [none] } // 普通字段 else { if (mf.isNotNull() || mf.isPk()) sb.append(" NOT NULL"); if (mf.hasDefaultValue()) { addDefaultValue(sb, mf); // depends on control dependency: [if], data = [none] } if (mf.isAutoIncreasement()) sb.append(" generated by default as identity "); if (mf.isPk() && en.getPks().size() == 1) { sb.append(" primary key "); // depends on control dependency: [if], data = [none] } } sb.append(','); // depends on control dependency: [for], data = [none] } // 结束表字段设置 sb.setCharAt(sb.length() - 1, ')'); //指定表空间 if(en.hasMeta(META_TABLESPACE)){ sb.append(String.format(" IN %s", en.getMeta(META_TABLESPACE)) ); // depends on control dependency: [if], data = [none] } // 执行创建语句 dao.execute(Sqls.create(sb.toString())); // 创建联合主键 if (en.getPks().size() > 1) { sb = new StringBuilder(); // depends on control dependency: [if], data = [none] sb.append("ALTER TABLE ").append(en.getTableName()).append(" ADD CONSTRAINT PK_"); // depends on control dependency: [if], data = [none] sb.append(makePksName(en)); // depends on control dependency: [if], data = [none] sb.append(" PRIMARY KEY ("); // depends on control dependency: [if], data = [none] for (MappingField mf : en.getPks()) { sb.append(mf.getColumnNameInSql()).append(","); // depends on control dependency: [for], data = [mf] } sb.setCharAt(sb.length() - 1, ')'); // depends on control dependency: [if], data = [none] 
dao.execute(Sqls.create(sb.toString())); // depends on control dependency: [if], data = [none] } // 创建关联表 createRelation(dao, en); // 创建索引 dao.execute(createIndexs(en).toArray(new Sql[0])); // 添加注释(表注释与字段注释) addComment(dao, en); return true; } }
public class class_name { public void setEndpoint(String endpoint) { this.endpoint = endpoint; if (bugsnag != null) { bugsnag.setEndpoints(endpoint, null); } } }
public class class_name { public void setEndpoint(String endpoint) { this.endpoint = endpoint; if (bugsnag != null) { bugsnag.setEndpoints(endpoint, null); // depends on control dependency: [if], data = [null)] } } }
public class class_name { public static Iterator<Object> getValues(Object[] array) { if (array == null) { return null; } ArrayList<Object> valueList = new ArrayList<Object>(); int i = array.length - 1; while (i >= 0) { valueList.add(array[i]); i = i - 2; } return valueList.iterator(); } }
public class class_name { public static Iterator<Object> getValues(Object[] array) { if (array == null) { return null; // depends on control dependency: [if], data = [none] } ArrayList<Object> valueList = new ArrayList<Object>(); int i = array.length - 1; while (i >= 0) { valueList.add(array[i]); // depends on control dependency: [while], data = [none] i = i - 2; // depends on control dependency: [while], data = [none] } return valueList.iterator(); } }
public class class_name { public StrBuilder insert(final int index, final Object obj) { if (obj == null) { return insert(index, nullText); } return insert(index, obj.toString()); } }
public class class_name { public StrBuilder insert(final int index, final Object obj) { if (obj == null) { return insert(index, nullText); // depends on control dependency: [if], data = [none] } return insert(index, obj.toString()); } }
public class class_name { void handleInitialRequest(HttpServerExchange initial, Http2Channel channel, byte[] data) { //we have a request Http2HeadersStreamSinkChannel sink = channel.createInitialUpgradeResponseStream(); final Http2ServerConnection connection = new Http2ServerConnection(channel, sink, undertowOptions, bufferSize, rootHandler); HeaderMap requestHeaders = new HeaderMap(); for(HeaderValues hv : initial.getRequestHeaders()) { requestHeaders.putAll(hv.getHeaderName(), hv); } final HttpServerExchange exchange = new HttpServerExchange(connection, requestHeaders, sink.getHeaders(), maxEntitySize); if(initial.getRequestHeaders().contains(Headers.EXPECT)) { HttpContinue.markContinueResponseSent(exchange); } if(initial.getAttachment(HttpAttachments.REQUEST_TRAILERS) != null) { exchange.putAttachment(HttpAttachments.REQUEST_TRAILERS, initial.getAttachment(HttpAttachments.REQUEST_TRAILERS)); } Connectors.setRequestStartTime(initial, exchange); connection.setExchange(exchange); exchange.setRequestScheme(initial.getRequestScheme()); exchange.setRequestMethod(initial.getRequestMethod()); exchange.setQueryString(initial.getQueryString()); if(data != null) { Connectors.ungetRequestBytes(exchange, new ImmediatePooledByteBuffer(ByteBuffer.wrap(data))); } else { Connectors.terminateRequest(exchange); } String uri = exchange.getQueryString().isEmpty() ? initial.getRequestURI() : initial.getRequestURI() + '?' + exchange.getQueryString(); try { Connectors.setExchangeRequestPath(exchange, uri, encoding, decode, allowEncodingSlash, decodeBuffer, maxParameters); } catch (ParameterLimitException e) { exchange.setStatusCode(StatusCodes.BAD_REQUEST); exchange.endExchange(); return; } handleCommonSetup(sink, exchange, connection); Connectors.executeRootHandler(rootHandler, exchange); } }
public class class_name { void handleInitialRequest(HttpServerExchange initial, Http2Channel channel, byte[] data) { //we have a request Http2HeadersStreamSinkChannel sink = channel.createInitialUpgradeResponseStream(); final Http2ServerConnection connection = new Http2ServerConnection(channel, sink, undertowOptions, bufferSize, rootHandler); HeaderMap requestHeaders = new HeaderMap(); for(HeaderValues hv : initial.getRequestHeaders()) { requestHeaders.putAll(hv.getHeaderName(), hv); // depends on control dependency: [for], data = [hv] } final HttpServerExchange exchange = new HttpServerExchange(connection, requestHeaders, sink.getHeaders(), maxEntitySize); if(initial.getRequestHeaders().contains(Headers.EXPECT)) { HttpContinue.markContinueResponseSent(exchange); // depends on control dependency: [if], data = [none] } if(initial.getAttachment(HttpAttachments.REQUEST_TRAILERS) != null) { exchange.putAttachment(HttpAttachments.REQUEST_TRAILERS, initial.getAttachment(HttpAttachments.REQUEST_TRAILERS)); // depends on control dependency: [if], data = [none] } Connectors.setRequestStartTime(initial, exchange); connection.setExchange(exchange); exchange.setRequestScheme(initial.getRequestScheme()); exchange.setRequestMethod(initial.getRequestMethod()); exchange.setQueryString(initial.getQueryString()); if(data != null) { Connectors.ungetRequestBytes(exchange, new ImmediatePooledByteBuffer(ByteBuffer.wrap(data))); // depends on control dependency: [if], data = [(data] } else { Connectors.terminateRequest(exchange); // depends on control dependency: [if], data = [none] } String uri = exchange.getQueryString().isEmpty() ? initial.getRequestURI() : initial.getRequestURI() + '?' 
+ exchange.getQueryString(); try { Connectors.setExchangeRequestPath(exchange, uri, encoding, decode, allowEncodingSlash, decodeBuffer, maxParameters); // depends on control dependency: [try], data = [none] } catch (ParameterLimitException e) { exchange.setStatusCode(StatusCodes.BAD_REQUEST); exchange.endExchange(); return; } // depends on control dependency: [catch], data = [none] handleCommonSetup(sink, exchange, connection); Connectors.executeRootHandler(rootHandler, exchange); } }
public class class_name { public QueryParameters get(int index) { if (getCurrentResultSet() == null && index > 0 && valueCached(index) == false) { // don't have anymore values in cache return null; } QueryParameters params = null; try { if (valueCached(index) == false) { if (this.type == Type.READ_ONLY_FORWARD && currentIndex >= index) { throw new MjdbcRuntimeException("Attempt to read current/previous value failed because it is not present in cache. " + "Please increase maximum cache size via overrider or MjdbcConfig."); } if (this.type == Type.READ_ONLY_FORWARD) { for (int i = currentIndex; i < index; i++) { params = null; do { params = convertResultSetNextLine(getCurrentResultSet()); if (params == null) { closeResultSet(getCurrentResultSet()); setCurrentResultSet(getNextResultSet()); } } while (params == null && getCurrentResultSet() != null); if (params != null) { updateCache(i + 1, params); currentIndex++; } else { if (getCurrentResultSet() == null) { break; } } } } else if (this.type == Type.READ_ONLY_SCROLL || this.type == Type.UPDATE_SCROLL) { params = readResultSetRow((index + 1) - generatedCacheMap.size()); if (params != null) { updateCache(index, params); } } } else { params = readCachedValue(index); } } catch (SQLException ex) { throw new MjdbcRuntimeException("Failed to read ResultSet", ex); } if (params == null) { // It seems appropriate to return null rather then exception. // This might be changed in the future } return params; } }
public class class_name { public QueryParameters get(int index) { if (getCurrentResultSet() == null && index > 0 && valueCached(index) == false) { // don't have anymore values in cache return null; // depends on control dependency: [if], data = [none] } QueryParameters params = null; try { if (valueCached(index) == false) { if (this.type == Type.READ_ONLY_FORWARD && currentIndex >= index) { throw new MjdbcRuntimeException("Attempt to read current/previous value failed because it is not present in cache. " + "Please increase maximum cache size via overrider or MjdbcConfig."); } if (this.type == Type.READ_ONLY_FORWARD) { for (int i = currentIndex; i < index; i++) { params = null; // depends on control dependency: [for], data = [none] do { params = convertResultSetNextLine(getCurrentResultSet()); if (params == null) { closeResultSet(getCurrentResultSet()); // depends on control dependency: [if], data = [none] setCurrentResultSet(getNextResultSet()); // depends on control dependency: [if], data = [none] } } while (params == null && getCurrentResultSet() != null); if (params != null) { updateCache(i + 1, params); // depends on control dependency: [if], data = [none] currentIndex++; // depends on control dependency: [if], data = [none] } else { if (getCurrentResultSet() == null) { break; } } } } else if (this.type == Type.READ_ONLY_SCROLL || this.type == Type.UPDATE_SCROLL) { params = readResultSetRow((index + 1) - generatedCacheMap.size()); // depends on control dependency: [if], data = [none] if (params != null) { updateCache(index, params); // depends on control dependency: [if], data = [none] } } } else { params = readCachedValue(index); // depends on control dependency: [if], data = [none] } } catch (SQLException ex) { throw new MjdbcRuntimeException("Failed to read ResultSet", ex); } // depends on control dependency: [catch], data = [none] if (params == null) { // It seems appropriate to return null rather then exception. 
// This might be changed in the future } return params; } }
public class class_name { @WebOperationMethod public Map<String,Object> downloadAsFile(Map<String, Object> params, HttpServletRequest request, HttpServletResponse response) throws Exception { String database = Objects.get(params, "database"); String collection = Objects.get(params, "collection", COLLECTION); String id = Objects.get(params, "id"); String name = Objects.get(params, "name"); FileStorage.FileReadBean b = null; try{ b = FileStorage.read(new Id(database,collection,id)); response.setContentType(b.getMeta().getContentType()); if (Objects.isNullOrEmpty(name)) { name = b.getMeta().getName(); if (name.length() > 100) name = name.substring(0, 100); if (!Objects.isNullOrEmpty(b.getMeta().getExt())) name += "." + b.getMeta().getExt(); } response.addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(name, "UTF-8")); try { IOUtils.copy(b.getInputStream(), response.getOutputStream()); } catch (IOException e) { throw S1SystemError.wrap(e); } }catch (NotFoundException e) { response.setStatus(404); }finally { FileStorage.closeAfterRead(b); } return null; } }
public class class_name { @WebOperationMethod public Map<String,Object> downloadAsFile(Map<String, Object> params, HttpServletRequest request, HttpServletResponse response) throws Exception { String database = Objects.get(params, "database"); String collection = Objects.get(params, "collection", COLLECTION); String id = Objects.get(params, "id"); String name = Objects.get(params, "name"); FileStorage.FileReadBean b = null; try{ b = FileStorage.read(new Id(database,collection,id)); response.setContentType(b.getMeta().getContentType()); if (Objects.isNullOrEmpty(name)) { name = b.getMeta().getName(); // depends on control dependency: [if], data = [none] if (name.length() > 100) name = name.substring(0, 100); if (!Objects.isNullOrEmpty(b.getMeta().getExt())) name += "." + b.getMeta().getExt(); } response.addHeader("Content-Disposition", "attachment;filename=" + URLEncoder.encode(name, "UTF-8")); try { IOUtils.copy(b.getInputStream(), response.getOutputStream()); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw S1SystemError.wrap(e); } // depends on control dependency: [catch], data = [none] }catch (NotFoundException e) { response.setStatus(404); }finally { FileStorage.closeAfterRead(b); } return null; } }
// UIMA annotator initialization: resolves the configured concept file names
// against the datapath (with wildcard support), parses each file into Concept
// objects, logs a warning for duplicate concept names, and initializes every
// concept with the annotator logger. Throws RegexAnnotatorConfigException when
// a non-wildcard concept file cannot be resolved; failures to close a concept
// file stream are logged as warnings, not propagated.
public class class_name { public void initialize(UimaContext aContext) throws ResourceInitializationException { super.initialize(aContext); // initialize annotator logger this.logger = getContext().getLogger(); // default initialization for number format this.floatNumberFormat = NumberFormat.getNumberInstance(); this.integerNumberFormat = NumberFormat.getIntegerInstance(); // create a concept file parser object ConceptFileParser parser = new ConceptFileParser_impl(); // get UIMA datapath and tokenize it into its elements StringTokenizer tokenizer = new StringTokenizer(getContext().getDataPath(), PATH_SEPARATOR); ArrayList<File> datapathElements = new ArrayList<File>(); while (tokenizer.hasMoreTokens()) { // add datapath elements to the 'datapathElements' array list datapathElements.add(new File(tokenizer.nextToken())); } // try to resolve the concept file names ArrayList<Concept> concepts = new ArrayList<Concept>(); for (int i = 0; i < conceptFileNames.length; i++) { // try to resolve the relative file name with classpath or datapath String filename = conceptFileNames[i]; List<ConceptFile> cfList = new ArrayList<ConceptFile>(); if (containsWildcardChar(filename)) { resolveRelativeWildcardFilePath(filename, datapathElements, cfList); } else { ConceptFile file = resolveRelativeFilePath(filename, datapathElements); // if the current concept file wasn't found, throw an exception if (file == null) { throw new RegexAnnotatorConfigException("regex_annotator_resource_not_found", new Object[] { conceptFileNames[i] }); } cfList.add(file); // log concept file path this.logger.logrb(Level.CONFIG, "RegExAnnotator", "initialize", MESSAGE_DIGEST, "regex_annotator_rule_set_file", new Object[] { file.getFilePath() }); } for (ConceptFile file : cfList) { // parse concept file to internal objects Concept[] currentConcepts = parser.parseConceptFile(file.getFilePath(), file.getStream()); try { file.getStream().close(); } catch (IOException e) { this.logger.logrb(Level.WARNING, 
"RegExAnnotator", "initialize", MESSAGE_DIGEST, "regex_annotator_error_closing_input_stream", new Object[] { file.getFilePath(), e.getMessage() }); } // add all concepts to the concepts list for (int c = 0; c < currentConcepts.length; c++) { concepts.add(currentConcepts[c]); } } } // get one array that contains all the concepts this.regexConcepts = concepts.toArray(new Concept[] {}); // check duplicate concept names HashSet<String> conceptNames = new HashSet<String>(this.regexConcepts.length); for (int i = 0; i < this.regexConcepts.length; i++) { String name = this.regexConcepts[i].getName(); // check if concept name was set, if not, skip concept if (name == null) { continue; } // concept name was set, check for duplicate concept names // duplicate concept names can occurs, just log a warning! if (conceptNames.contains(name)) { this.logger.logrb(Level.WARNING, "RegExAnnotator", "initialize", MESSAGE_DIGEST, "regex_annotator_warning_duplicate_concept_name", new Object[] { name }); } else { // add concept name to the concept name list conceptNames.add(name); } } // initialize the regex concepts for (int i = 0; i < this.regexConcepts.length; i++) { ((Concept_impl) this.regexConcepts[i]).initialize(this.logger); } } }
// Control-dependency-annotated copy of the UIMA initialize method above: code is
// identical; inline "// depends on control dependency" markers record the
// controlling construct (if/try/catch/for) and data dependencies per statement.
public class class_name { public void initialize(UimaContext aContext) throws ResourceInitializationException { super.initialize(aContext); // initialize annotator logger this.logger = getContext().getLogger(); // default initialization for number format this.floatNumberFormat = NumberFormat.getNumberInstance(); this.integerNumberFormat = NumberFormat.getIntegerInstance(); // create a concept file parser object ConceptFileParser parser = new ConceptFileParser_impl(); // get UIMA datapath and tokenize it into its elements StringTokenizer tokenizer = new StringTokenizer(getContext().getDataPath(), PATH_SEPARATOR); ArrayList<File> datapathElements = new ArrayList<File>(); while (tokenizer.hasMoreTokens()) { // add datapath elements to the 'datapathElements' array list datapathElements.add(new File(tokenizer.nextToken())); } // try to resolve the concept file names ArrayList<Concept> concepts = new ArrayList<Concept>(); for (int i = 0; i < conceptFileNames.length; i++) { // try to resolve the relative file name with classpath or datapath String filename = conceptFileNames[i]; List<ConceptFile> cfList = new ArrayList<ConceptFile>(); if (containsWildcardChar(filename)) { resolveRelativeWildcardFilePath(filename, datapathElements, cfList); } else { ConceptFile file = resolveRelativeFilePath(filename, datapathElements); // if the current concept file wasn't found, throw an exception if (file == null) { throw new RegexAnnotatorConfigException("regex_annotator_resource_not_found", new Object[] { conceptFileNames[i] }); } cfList.add(file); // log concept file path this.logger.logrb(Level.CONFIG, "RegExAnnotator", "initialize", MESSAGE_DIGEST, "regex_annotator_rule_set_file", new Object[] { file.getFilePath() }); } for (ConceptFile file : cfList) { // parse concept file to internal objects Concept[] currentConcepts = parser.parseConceptFile(file.getFilePath(), file.getStream()); try { file.getStream().close(); // depends on control dependency: [try], data = [none] } catch 
(IOException e) { this.logger.logrb(Level.WARNING, "RegExAnnotator", "initialize", MESSAGE_DIGEST, "regex_annotator_error_closing_input_stream", new Object[] { file.getFilePath(), e.getMessage() }); } // depends on control dependency: [catch], data = [none] // add all concepts to the concepts list for (int c = 0; c < currentConcepts.length; c++) { concepts.add(currentConcepts[c]); // depends on control dependency: [for], data = [c] } } } // get one array that contains all the concepts this.regexConcepts = concepts.toArray(new Concept[] {}); // check duplicate concept names HashSet<String> conceptNames = new HashSet<String>(this.regexConcepts.length); for (int i = 0; i < this.regexConcepts.length; i++) { String name = this.regexConcepts[i].getName(); // check if concept name was set, if not, skip concept if (name == null) { continue; } // concept name was set, check for duplicate concept names // duplicate concept names can occurs, just log a warning! if (conceptNames.contains(name)) { this.logger.logrb(Level.WARNING, "RegExAnnotator", "initialize", MESSAGE_DIGEST, "regex_annotator_warning_duplicate_concept_name", new Object[] { name }); } else { // add concept name to the concept name list conceptNames.add(name); } } // initialize the regex concepts for (int i = 0; i < this.regexConcepts.length; i++) { ((Concept_impl) this.regexConcepts[i]).initialize(this.logger); } } }
public class class_name {
    /**
     * Decides whether the given job instance may be purged.
     *
     * <p>An instance is purgeable unless it is still in-flight, i.e. its
     * state is one of SUBMITTED, JMS_QUEUED, JMS_CONSUMED or DISPATCHED.</p>
     *
     * @param jobInstanceId id of the job instance to inspect
     * @return {@code true} when the instance is safe to purge
     */
    @Override
    public boolean isJobInstancePurgeable(long jobInstanceId) {
        final InstanceState state = getJobInstance(jobInstanceId).getInstanceState();
        final boolean inFlight = state.equals(InstanceState.SUBMITTED)
                || state.equals(InstanceState.JMS_QUEUED)
                || state.equals(InstanceState.JMS_CONSUMED)
                || state.equals(InstanceState.DISPATCHED);
        return !inFlight;
    }
}
// Control-dependency-annotated copy of isJobInstancePurgeable: identical code,
// with markers recording that both return statements are controlled by the if.
public class class_name { @Override public boolean isJobInstancePurgeable(long jobInstanceId) { InstanceState instanceState = getJobInstance(jobInstanceId).getInstanceState(); if (instanceState.equals(InstanceState.SUBMITTED) || instanceState.equals(InstanceState.JMS_QUEUED) || instanceState.equals(InstanceState.JMS_CONSUMED) || instanceState.equals(InstanceState.DISPATCHED)) { return false; // depends on control dependency: [if], data = [none] } else { return true; // depends on control dependency: [if], data = [none] } } }
public class class_name {
    /**
     * Checks whether the class named in a {@link ClassNotFoundException} can in
     * fact be loaded by the supplied class loader.
     *
     * <p>The exception's message is assumed to carry the fully-qualified class
     * name (the convention for CNFEs thrown by {@code Class.forName}). The
     * class is only located, not initialized.</p>
     *
     * @param cnfe exception whose message carries the class name to probe
     * @param cl   class loader to attempt the load with
     * @return {@code true} if the class is loadable, {@code false} on any failure
     */
    public static boolean validateClassLoadable(ClassNotFoundException cnfe, ClassLoader cl) {
        try {
            String className = cnfe.getMessage();
            Class.forName(className, false, cl); // locate only, skip static init
            return true;
        } catch (Exception e) {
            // Collapses the former duplicate ClassNotFoundException/Exception
            // handlers (both returned false): any failure, including a null
            // message, means "not loadable".
            return false;
        }
    }
}
// Control-dependency-annotated copy of validateClassLoadable: identical code,
// with markers on the try body and both catch handlers.
public class class_name { public static boolean validateClassLoadable(ClassNotFoundException cnfe, ClassLoader cl) { try { String className = cnfe.getMessage(); Class.forName(className, false, cl); return true; // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException e) { return false; } // depends on control dependency: [catch], data = [none] catch (Exception e) { return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name {
    /**
     * Converts every stored event into its {@code CommonEvent} form.
     *
     * @param ctx JAXB context handed to each event's conversion
     * @return a new list with one CommonEvent per event, in iteration order
     */
    public List<CommonEvent> asCommonEvents(final JAXBContext ctx) {
        final List<CommonEvent> converted = new ArrayList<CommonEvent>();
        for (final Event current : events) {
            converted.add(current.asCommonEvent(ctx));
        }
        return converted;
    }
}
// Control-dependency-annotated copy of asCommonEvents: identical code, with a
// marker recording the loop-body dependency on the for construct.
public class class_name { public List<CommonEvent> asCommonEvents(final JAXBContext ctx) { final List<CommonEvent> list = new ArrayList<CommonEvent>(); for (final Event event : events) { list.add(event.asCommonEvent(ctx)); // depends on control dependency: [for], data = [event] } return list; } }
public class class_name { protected String getSoapAction() { String soapAction = null; try { soapAction = getAttributeValueSmart(SOAP_ACTION); } catch (PropertyException ex) { logger.severeException(ex.getMessage(), ex); } if (soapAction == null) { // required by SOAP 1.1 (http://www.w3.org/TR/soap11/#_Toc478383528) String soapVersion = getSoapVersion(); if (soapVersion != null && soapVersion.equals(SOAP_VERSION_11)) soapAction = ""; } return soapAction; } }
// Control-dependency-annotated copy of getSoapAction: identical code, with
// markers on the try body and catch handler.
public class class_name { protected String getSoapAction() { String soapAction = null; try { soapAction = getAttributeValueSmart(SOAP_ACTION); // depends on control dependency: [try], data = [none] } catch (PropertyException ex) { logger.severeException(ex.getMessage(), ex); } // depends on control dependency: [catch], data = [none] if (soapAction == null) { // required by SOAP 1.1 (http://www.w3.org/TR/soap11/#_Toc478383528) String soapVersion = getSoapVersion(); if (soapVersion != null && soapVersion.equals(SOAP_VERSION_11)) soapAction = ""; } return soapAction; } }
public class class_name { static PrefsTransform getUtilTransform(TypeName type) { String typeName = type.toString(); // Integer.class.getCanonicalName().equals(typeName) if (Date.class.getCanonicalName().equals(typeName)) { return new DatePrefsTransform(); } if (Locale.class.getCanonicalName().equals(typeName)) { return new LocalePrefsTransform(); } if (Currency.class.getCanonicalName().equals(typeName)) { return new CurrencyPrefsTransform(); } if (Calendar.class.getCanonicalName().equals(typeName)) { return new CalendarPrefsTransform(); } if (TimeZone.class.getCanonicalName().equals(typeName)) { return new TimeZonePrefsTransform(); } return null; } }
// Control-dependency-annotated copy of getUtilTransform: identical code, with a
// marker on each conditional return.
public class class_name { static PrefsTransform getUtilTransform(TypeName type) { String typeName = type.toString(); // Integer.class.getCanonicalName().equals(typeName) if (Date.class.getCanonicalName().equals(typeName)) { return new DatePrefsTransform(); // depends on control dependency: [if], data = [none] } if (Locale.class.getCanonicalName().equals(typeName)) { return new LocalePrefsTransform(); // depends on control dependency: [if], data = [none] } if (Currency.class.getCanonicalName().equals(typeName)) { return new CurrencyPrefsTransform(); // depends on control dependency: [if], data = [none] } if (Calendar.class.getCanonicalName().equals(typeName)) { return new CalendarPrefsTransform(); // depends on control dependency: [if], data = [none] } if (TimeZone.class.getCanonicalName().equals(typeName)) { return new TimeZonePrefsTransform(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name {
    /**
     * Adds the given DOM attribute node to a SAX attribute collection.
     *
     * <p>When the node has no local name (DOM level 1), the local part is
     * derived from the qualified name with any prefix stripped. A missing
     * namespace URI is replaced by {@code NULL_NS_URI}, and the SAX type is
     * "ID" or "CDATA" according to {@code Attr.isId()}.</p>
     *
     * @param atts SAX attribute collection to update
     * @param att  DOM node; must be an attribute node
     * @throws IllegalArgumentException if {@code att} is not an attribute node
     */
    public static void addOrSetAttribute(final AttributesImpl atts, final Node att) {
        if (att.getNodeType() != Node.ATTRIBUTE_NODE) {
            throw new IllegalArgumentException();
        }
        final Attr attr = (Attr) att;
        String local = attr.getLocalName();
        if (local == null) {
            // No namespace info on this node: fall back to the qualified name.
            local = attr.getName();
            final int colon = local.indexOf(':');
            if (colon != -1) {
                local = local.substring(colon + 1);
            }
        }
        final String uri = attr.getNamespaceURI() != null ? attr.getNamespaceURI() : NULL_NS_URI;
        final String qName = attr.getName() != null ? attr.getName() : local;
        final String saxType = attr.isId() ? "ID" : "CDATA";
        addOrSetAttribute(atts, uri, local, qName, saxType, attr.getValue());
    }
}
// Control-dependency-annotated copy of addOrSetAttribute: identical code, with
// markers on the statements inside the null-local-name fallback.
public class class_name { public static void addOrSetAttribute(final AttributesImpl atts, final Node att) { if (att.getNodeType() != Node.ATTRIBUTE_NODE) { throw new IllegalArgumentException(); } final Attr a = (Attr) att; String localName = a.getLocalName(); if (localName == null) { localName = a.getName(); // depends on control dependency: [if], data = [none] final int i = localName.indexOf(':'); if (i != -1) { localName = localName.substring(i + 1); // depends on control dependency: [if], data = [(i] } } addOrSetAttribute(atts, a.getNamespaceURI() != null ? a.getNamespaceURI() : NULL_NS_URI, localName, a.getName() != null ? a.getName() : localName, a.isId() ? "ID" : "CDATA", a.getValue()); } }
public class class_name {
    /**
     * Returns the current lifecycle state rendered as a string, or null when
     * no state has been set. The read lock guards against a concurrent state
     * transition while the value is read.
     */
    @Nullable
    public String getState() {
        lifecycleLock.readLock().lock();
        try {
            if (state == null) {
                return null;
            }
            return state.toString();
        } finally {
            lifecycleLock.readLock().unlock();
        }
    }
}
// Control-dependency-annotated copy of getState: identical code, with a marker
// on the try-body return.
public class class_name { @Nullable public String getState() { lifecycleLock.readLock().lock(); try { return state == null ? null : state.toString(); // depends on control dependency: [try], data = [none] } finally { lifecycleLock.readLock().unlock(); } } }
public class class_name {
    /**
     * Replaces the stored predecessor runs with a defensive copy of the given
     * collection; a null argument clears the field.
     *
     * @param predecessorRuns runs to copy, or null to unset
     */
    public void setPredecessorRuns(java.util.Collection<Predecessor> predecessorRuns) {
        this.predecessorRuns = (predecessorRuns == null)
                ? null
                : new java.util.ArrayList<Predecessor>(predecessorRuns);
    }
}
// Control-dependency-annotated copy of setPredecessorRuns: identical code, with
// markers on the null-clearing branch.
public class class_name { public void setPredecessorRuns(java.util.Collection<Predecessor> predecessorRuns) { if (predecessorRuns == null) { this.predecessorRuns = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.predecessorRuns = new java.util.ArrayList<Predecessor>(predecessorRuns); } }
public class class_name {
    /**
     * Builds the SID list for an authentication token: the principal SID
     * first, followed by one SID per reachable granted authority after role
     * hierarchy expansion.
     *
     * @param authentication the caller's authentication token
     * @return mutable list of SIDs, principal first
     */
    public List<Sid> getSids(Authentication authentication) {
        Collection<? extends GrantedAuthority> reachable = roleHierarchy
                .getReachableGrantedAuthorities(authentication.getAuthorities());
        // Pre-size for the principal plus one entry per authority.
        List<Sid> result = new ArrayList<>(reachable.size() + 1);
        result.add(new PrincipalSid(authentication));
        for (GrantedAuthority granted : reachable) {
            result.add(new GrantedAuthoritySid(granted));
        }
        return result;
    }
}
// Control-dependency-annotated copy of getSids: identical code, with a marker
// recording the loop-body dependency on the for construct.
public class class_name { public List<Sid> getSids(Authentication authentication) { Collection<? extends GrantedAuthority> authorities = roleHierarchy .getReachableGrantedAuthorities(authentication.getAuthorities()); List<Sid> sids = new ArrayList<>(authorities.size() + 1); sids.add(new PrincipalSid(authentication)); for (GrantedAuthority authority : authorities) { sids.add(new GrantedAuthoritySid(authority)); // depends on control dependency: [for], data = [authority] } return sids; } }
// Starts the file appender: resolves and validates the File property, forces
// Append in prudent mode, aborts on filename collisions with appenders defined
// earlier, and either opens the file immediately or — under lazyInit — installs
// a NOP output stream so the real stream can be opened later. super.start()
// runs only when no errors were recorded.
public class class_name { public void start() { int errors = 0; // Use getFile() instead of direct access to fileName because // the function is overridden in RollingFileAppender, which // returns a value that doesn't necessarily match fileName. String file = getFile(); if (file != null) { file = getAbsoluteFilePath(file); addInfo("File property is set to [" + file + "]"); if (prudent) { if (!isAppend()) { setAppend(true); addWarn("Setting \"Append\" property to true on account of \"Prudent\" mode"); } } if (!lazyInit) { if (checkForFileCollisionInPreviousFileAppenders()) { addError("Collisions detected with FileAppender/RollingAppender instances defined earlier. Aborting."); addError(COLLISION_WITH_EARLIER_APPENDER_URL); errors++; } else { // file should be opened only if collision free try { openFile(file); } catch (IOException e) { errors++; addError("openFile(" + file + "," + append + ") failed", e); } } } else { // We'll initialize the file output stream later. Use a dummy for now // to satisfy OutputStreamAppender.start(). setOutputStream(new NOPOutputStream()); } } else { errors++; addError("\"File\" property not set for appender named [" + name + "]"); } if (errors == 0) { super.start(); } } }
// Control-dependency-annotated copy of the appender start() method: identical
// code, with markers recording each statement's controlling construct.
public class class_name { public void start() { int errors = 0; // Use getFile() instead of direct access to fileName because // the function is overridden in RollingFileAppender, which // returns a value that doesn't necessarily match fileName. String file = getFile(); if (file != null) { file = getAbsoluteFilePath(file); // depends on control dependency: [if], data = [(file] addInfo("File property is set to [" + file + "]"); // depends on control dependency: [if], data = [none] if (prudent) { if (!isAppend()) { setAppend(true); // depends on control dependency: [if], data = [none] addWarn("Setting \"Append\" property to true on account of \"Prudent\" mode"); // depends on control dependency: [if], data = [none] } } if (!lazyInit) { if (checkForFileCollisionInPreviousFileAppenders()) { addError("Collisions detected with FileAppender/RollingAppender instances defined earlier. Aborting."); addError(COLLISION_WITH_EARLIER_APPENDER_URL); errors++; // depends on control dependency: [if], data = [none] } else { // file should be opened only if collision free try { openFile(file); // depends on control dependency: [try], data = [none] } catch (IOException e) { errors++; addError("openFile(" + file + "," + append + ") failed", e); } // depends on control dependency: [catch], data = [none] } } else { // We'll initialize the file output stream later. Use a dummy for now // to satisfy OutputStreamAppender.start(). setOutputStream(new NOPOutputStream()); // depends on control dependency: [if], data = [none] } } else { errors++; // depends on control dependency: [if], data = [none] addError("\"File\" property not set for appender named [" + name + "]"); // depends on control dependency: [if], data = [none] } if (errors == 0) { super.start(); // depends on control dependency: [if], data = [none] } } }
// Pushes a value into a fixed-capacity circular queue: when full, the element
// at 'last' is evicted and its contribution removed from sumOfQueue before the
// new value is written at 'first'; both indices wrap at maxSize.
// NOTE(review): resetting last to 0 whenever first wraps assumes first can only
// wrap when the queue is full (last == 0 already) — confirm this invariant,
// otherwise a partially filled queue that wraps would lose its tail position.
public class class_name { public void push(long x) { if (this.queueSize == this.maxSize) { assert this.last == this.first; this.sumOfQueue -= this.elems.get(this.last); if ((++this.last) == this.maxSize) this.last = 0; } else this.queueSize++; this.sumOfQueue += x; this.elems.set(this.first, x); if ((++this.first) == this.maxSize) { this.first = 0; this.last = 0; } } }
// Control-dependency-annotated copy of push: identical code, with markers on
// the eviction branch and the index-wrap branch.
public class class_name { public void push(long x) { if (this.queueSize == this.maxSize) { assert this.last == this.first; // depends on control dependency: [if], data = [none] this.sumOfQueue -= this.elems.get(this.last); // depends on control dependency: [if], data = [none] if ((++this.last) == this.maxSize) this.last = 0; } else this.queueSize++; this.sumOfQueue += x; this.elems.set(this.first, x); if ((++this.first) == this.maxSize) { this.first = 0; // depends on control dependency: [if], data = [none] this.last = 0; // depends on control dependency: [if], data = [none] } } }
// Handles a fragment response on the SP initiator: re-routes misrouted
// fragments, feeds k-safety duplicate counters (crashing the cluster on a hash
// mismatch or partial rollback), buffers safe reads behind the repair-log
// truncation point, advances the truncation handle for completed writes, and
// finally forwards the response to its destination site. Trace events are
// emitted only when SPI tracing is enabled.
public class class_name { private void handleFragmentResponseMessage(FragmentResponseMessage message) { if (isFragmentMisrouted(message)){ m_mailbox.send(message.getDestinationSiteId(), message); return; } final VoltTrace.TraceEventBatch traceLog = VoltTrace.log(VoltTrace.Category.SPI); // Send the message to the duplicate counter, if any DuplicateCounter counter = m_duplicateCounters.get(new DuplicateCounterKey(message.getTxnId(), message.getSpHandle())); final TransactionState txn = m_outstandingTxns.get(message.getTxnId()); if (counter != null) { String traceName = "recvfragment"; if (message.m_sourceHSId != m_mailbox.getHSId()) { traceName = "replicatefragment"; } String finalTraceName = traceName; if (traceLog != null) { traceLog.add(() -> VoltTrace.endAsync(finalTraceName, MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), message.m_sourceHSId, message.getSpHandle(), message.getTxnId()), "status", message.getStatusCode())); } int result = counter.offer(message); if (result == DuplicateCounter.DONE) { if (txn != null && txn.isDone()) { setRepairLogTruncationHandle(txn.m_spHandle, txn.isLeaderMigrationInvolved()); } m_duplicateCounters.remove(new DuplicateCounterKey(message.getTxnId(), message.getSpHandle())); FragmentResponseMessage resp = (FragmentResponseMessage)counter.getLastResponse(); // MPI is tracking deps per partition HSID. We need to make // sure we write ours into the message getting sent to the MPI resp.setExecutorSiteId(m_mailbox.getHSId()); m_mailbox.send(counter.m_destinationId, resp); } else if (result == DuplicateCounter.MISMATCH) { VoltDB.crashGlobalVoltDB("HASH MISMATCH running multi-part procedure.", true, null); } else if (result == DuplicateCounter.ABORT) { VoltDB.crashGlobalVoltDB("PARTIAL ROLLBACK/ABORT running multi-part procedure.", true, null); } // doing duplicate suppression: all done. return; } // No k-safety means no replica: read/write queries on master. 
// K-safety: read-only queries (on master) or write queries (on replica). if ( (m_isLeader || (!m_isLeader && message.isExecutedOnPreviousLeader())) && m_sendToHSIds.length > 0 && message.getRespBufferable() && (txn == null || txn.isReadOnly()) ) { // on k-safety leader with safe reads configuration: one shot reads + normal multi-fragments MP reads // we will have to buffer these reads until previous writes acked in the cluster. long readTxnId = txn == null ? message.getSpHandle() : txn.m_spHandle; m_bufferedReadLog.offer(m_mailbox, message, readTxnId, m_repairLogTruncationHandle); return; } // for complete writes txn, we will advance the transaction point if (txn != null && !txn.isReadOnly() && txn.isDone()) { setRepairLogTruncationHandle(txn.m_spHandle, message.isExecutedOnPreviousLeader()); } if (traceLog != null) { traceLog.add(() -> VoltTrace.endAsync("recvfragment", MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), message.m_sourceHSId, message.getSpHandle(), message.getTxnId()), "status", message.getStatusCode())); } m_mailbox.send(message.getDestinationSiteId(), message); } }
// Control-dependency-annotated copy of handleFragmentResponseMessage: code kept
// byte-identical (including the pre-existing mid-comment line break), with
// markers recording each statement's controlling construct.
public class class_name { private void handleFragmentResponseMessage(FragmentResponseMessage message) { if (isFragmentMisrouted(message)){ m_mailbox.send(message.getDestinationSiteId(), message); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } final VoltTrace.TraceEventBatch traceLog = VoltTrace.log(VoltTrace.Category.SPI); // Send the message to the duplicate counter, if any DuplicateCounter counter = m_duplicateCounters.get(new DuplicateCounterKey(message.getTxnId(), message.getSpHandle())); final TransactionState txn = m_outstandingTxns.get(message.getTxnId()); if (counter != null) { String traceName = "recvfragment"; if (message.m_sourceHSId != m_mailbox.getHSId()) { traceName = "replicatefragment"; // depends on control dependency: [if], data = [none] } String finalTraceName = traceName; if (traceLog != null) { traceLog.add(() -> VoltTrace.endAsync(finalTraceName, MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), message.m_sourceHSId, message.getSpHandle(), message.getTxnId()), "status", message.getStatusCode())); // depends on control dependency: [if], data = [none] } int result = counter.offer(message); if (result == DuplicateCounter.DONE) { if (txn != null && txn.isDone()) { setRepairLogTruncationHandle(txn.m_spHandle, txn.isLeaderMigrationInvolved()); // depends on control dependency: [if], data = [(txn] } m_duplicateCounters.remove(new DuplicateCounterKey(message.getTxnId(), message.getSpHandle())); // depends on control dependency: [if], data = [none] FragmentResponseMessage resp = (FragmentResponseMessage)counter.getLastResponse(); // MPI is tracking deps per partition HSID. 
We need to make // sure we write ours into the message getting sent to the MPI resp.setExecutorSiteId(m_mailbox.getHSId()); // depends on control dependency: [if], data = [none] m_mailbox.send(counter.m_destinationId, resp); // depends on control dependency: [if], data = [none] } else if (result == DuplicateCounter.MISMATCH) { VoltDB.crashGlobalVoltDB("HASH MISMATCH running multi-part procedure.", true, null); // depends on control dependency: [if], data = [none] } else if (result == DuplicateCounter.ABORT) { VoltDB.crashGlobalVoltDB("PARTIAL ROLLBACK/ABORT running multi-part procedure.", true, null); // depends on control dependency: [if], data = [none] } // doing duplicate suppression: all done. return; // depends on control dependency: [if], data = [none] } // No k-safety means no replica: read/write queries on master. // K-safety: read-only queries (on master) or write queries (on replica). if ( (m_isLeader || (!m_isLeader && message.isExecutedOnPreviousLeader())) && m_sendToHSIds.length > 0 && message.getRespBufferable() && (txn == null || txn.isReadOnly()) ) { // on k-safety leader with safe reads configuration: one shot reads + normal multi-fragments MP reads // we will have to buffer these reads until previous writes acked in the cluster. long readTxnId = txn == null ? 
message.getSpHandle() : txn.m_spHandle; m_bufferedReadLog.offer(m_mailbox, message, readTxnId, m_repairLogTruncationHandle); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // for complete writes txn, we will advance the transaction point if (txn != null && !txn.isReadOnly() && txn.isDone()) { setRepairLogTruncationHandle(txn.m_spHandle, message.isExecutedOnPreviousLeader()); // depends on control dependency: [if], data = [(txn] } if (traceLog != null) { traceLog.add(() -> VoltTrace.endAsync("recvfragment", MiscUtils.hsIdPairTxnIdToString(m_mailbox.getHSId(), message.m_sourceHSId, message.getSpHandle(), message.getTxnId()), "status", message.getStatusCode())); // depends on control dependency: [if], data = [none] } m_mailbox.send(message.getDestinationSiteId(), message); } }
// Computes the preferred hosts for an input split: short-circuits when the
// split fits in one block, otherwise walks every block the split spans,
// aggregates each host's and rack's byte contribution (faking a rack prefix
// when no topology info is available), and delegates the final host selection
// to identifyHosts. Only one level of topology (rack/host) is supported.
public class class_name { protected String[] getSplitHosts(BlockLocation[] blkLocations, long offset, long splitSize, NetworkTopology clusterMap) throws IOException { int startIndex = getBlockIndex(blkLocations, offset); long bytesInThisBlock = blkLocations[startIndex].getOffset() + blkLocations[startIndex].getLength() - offset; //If this is the only block, just return if (bytesInThisBlock >= splitSize) { return blkLocations[startIndex].getHosts(); } long bytesInFirstBlock = bytesInThisBlock; int index = startIndex + 1; splitSize -= bytesInThisBlock; while (splitSize > 0) { bytesInThisBlock = Math.min(splitSize, blkLocations[index++].getLength()); splitSize -= bytesInThisBlock; } long bytesInLastBlock = bytesInThisBlock; int endIndex = index - 1; Map <Node,NodeInfo> hostsMap = new IdentityHashMap<Node,NodeInfo>(); Map <Node,NodeInfo> racksMap = new IdentityHashMap<Node,NodeInfo>(); String [] allTopos = new String[0]; // Build the hierarchy and aggregate the contribution of // bytes at each level. See TestGetSplitHosts.java for (index = startIndex; index <= endIndex; index++) { // Establish the bytes in this block if (index == startIndex) { bytesInThisBlock = bytesInFirstBlock; } else if (index == endIndex) { bytesInThisBlock = bytesInLastBlock; } else { bytesInThisBlock = blkLocations[index].getLength(); } allTopos = blkLocations[index].getTopologyPaths(); // If no topology information is available, just // prefix a fakeRack if (allTopos.length == 0) { allTopos = fakeRacks(blkLocations, index); } // NOTE: This code currently works only for one level of // hierarchy (rack/host). 
However, it is relatively easy // to extend this to support aggregation at different // levels for (String topo: allTopos) { Node node, parentNode; NodeInfo nodeInfo, parentNodeInfo; node = clusterMap.getNode(topo); if (node == null) { node = new NodeBase(topo); clusterMap.add(node); } nodeInfo = hostsMap.get(node); if (nodeInfo == null) { nodeInfo = new NodeInfo(node); hostsMap.put(node,nodeInfo); parentNode = node.getParent(); parentNodeInfo = racksMap.get(parentNode); if (parentNodeInfo == null) { parentNodeInfo = new NodeInfo(parentNode); racksMap.put(parentNode,parentNodeInfo); } parentNodeInfo.addLeaf(nodeInfo); } else { nodeInfo = hostsMap.get(node); parentNode = node.getParent(); parentNodeInfo = racksMap.get(parentNode); } nodeInfo.addValue(index, bytesInThisBlock); parentNodeInfo.addValue(index, bytesInThisBlock); } // for all topos } // for all indices return identifyHosts(allTopos.length, racksMap); } }
// Control-dependency-annotated copy of getSplitHosts: code kept byte-identical
// (including the pre-existing mid-comment line break), with markers recording
// each statement's controlling construct inside the topology loop.
public class class_name { protected String[] getSplitHosts(BlockLocation[] blkLocations, long offset, long splitSize, NetworkTopology clusterMap) throws IOException { int startIndex = getBlockIndex(blkLocations, offset); long bytesInThisBlock = blkLocations[startIndex].getOffset() + blkLocations[startIndex].getLength() - offset; //If this is the only block, just return if (bytesInThisBlock >= splitSize) { return blkLocations[startIndex].getHosts(); } long bytesInFirstBlock = bytesInThisBlock; int index = startIndex + 1; splitSize -= bytesInThisBlock; while (splitSize > 0) { bytesInThisBlock = Math.min(splitSize, blkLocations[index++].getLength()); splitSize -= bytesInThisBlock; } long bytesInLastBlock = bytesInThisBlock; int endIndex = index - 1; Map <Node,NodeInfo> hostsMap = new IdentityHashMap<Node,NodeInfo>(); Map <Node,NodeInfo> racksMap = new IdentityHashMap<Node,NodeInfo>(); String [] allTopos = new String[0]; // Build the hierarchy and aggregate the contribution of // bytes at each level. See TestGetSplitHosts.java for (index = startIndex; index <= endIndex; index++) { // Establish the bytes in this block if (index == startIndex) { bytesInThisBlock = bytesInFirstBlock; } else if (index == endIndex) { bytesInThisBlock = bytesInLastBlock; } else { bytesInThisBlock = blkLocations[index].getLength(); } allTopos = blkLocations[index].getTopologyPaths(); // If no topology information is available, just // prefix a fakeRack if (allTopos.length == 0) { allTopos = fakeRacks(blkLocations, index); } // NOTE: This code currently works only for one level of // hierarchy (rack/host). 
However, it is relatively easy // to extend this to support aggregation at different // levels for (String topo: allTopos) { Node node, parentNode; NodeInfo nodeInfo, parentNodeInfo; node = clusterMap.getNode(topo); if (node == null) { node = new NodeBase(topo); // depends on control dependency: [if], data = [none] clusterMap.add(node); // depends on control dependency: [if], data = [(node] } nodeInfo = hostsMap.get(node); if (nodeInfo == null) { nodeInfo = new NodeInfo(node); // depends on control dependency: [if], data = [none] hostsMap.put(node,nodeInfo); // depends on control dependency: [if], data = [none] parentNode = node.getParent(); // depends on control dependency: [if], data = [none] parentNodeInfo = racksMap.get(parentNode); // depends on control dependency: [if], data = [none] if (parentNodeInfo == null) { parentNodeInfo = new NodeInfo(parentNode); // depends on control dependency: [if], data = [none] racksMap.put(parentNode,parentNodeInfo); // depends on control dependency: [if], data = [none] } parentNodeInfo.addLeaf(nodeInfo); // depends on control dependency: [if], data = [(nodeInfo] } else { nodeInfo = hostsMap.get(node); // depends on control dependency: [if], data = [none] parentNode = node.getParent(); // depends on control dependency: [if], data = [none] parentNodeInfo = racksMap.get(parentNode); // depends on control dependency: [if], data = [none] } nodeInfo.addValue(index, bytesInThisBlock); parentNodeInfo.addValue(index, bytesInThisBlock); } // for all topos } // for all indices return identifyHosts(allTopos.length, racksMap); } }
public class class_name {
    /**
     * Looks up an integer config value, returning the caller-supplied default
     * when the option has no raw value (the option's own default is used only
     * as the fallback inside the conversion).
     *
     * @param configOption    option to read
     * @param overrideDefault value returned when the option is not set
     * @return the configured value, or {@code overrideDefault}
     */
    @PublicEvolving
    public int getInteger(ConfigOption<Integer> configOption, int overrideDefault) {
        Object raw = getRawValueFromOption(configOption);
        return (raw == null) ? overrideDefault : convertToInt(raw, configOption.defaultValue());
    }
}
// Control-dependency-annotated copy of getInteger: identical code, with a
// marker on the early-return branch.
public class class_name { @PublicEvolving public int getInteger(ConfigOption<Integer> configOption, int overrideDefault) { Object o = getRawValueFromOption(configOption); if (o == null) { return overrideDefault; // depends on control dependency: [if], data = [none] } return convertToInt(o, configOption.defaultValue()); } }
public class class_name {
    /**
     * Loads the column names and JDBC type codes of {@code tableName} into the
     * {@code columns} and {@code columnTypes} fields, using the connection's
     * {@link java.sql.DatabaseMetaData}.
     *
     * <p>Does nothing when no connection is available. On {@link SQLException}
     * the error is reported via {@code ZaurusEditor.printStatus} and whatever
     * was read so far is kept (possibly empty arrays).
     *
     * <p>Fix: the original closed {@code colList} only on the success path, so
     * an {@code SQLException} thrown while iterating leaked the
     * {@code ResultSet}; the close now happens in a {@code finally} block.
     */
    private void fetchColumns() {
        Vector temp = new Vector(20);
        Vector tempType = new Vector(20);

        try {
            if (cConn == null) {
                return;    // no connection yet -> nothing to fetch
            }

            if (dbmeta == null) {
                dbmeta = cConn.getMetaData();    // lazily cache the metadata handle
            }

            // "%" matches every column of the table.
            ResultSet colList = dbmeta.getColumns(null, null, tableName, "%");

            try {
                while (colList.next()) {
                    temp.addElement(colList.getString("COLUMN_NAME"));
                    tempType.addElement(new Short(colList.getShort("DATA_TYPE")));
                }
            } finally {
                colList.close();    // always release the result set, even on error
            }
        } catch (SQLException e) {
            ZaurusEditor.printStatus("SQL Exception: " + e.getMessage());
        }

        columns = new String[temp.size()];

        temp.copyInto(columns);

        columnTypes = new short[temp.size()];

        for (int i = 0; i < columnTypes.length; i++) {
            columnTypes[i] = ((Short) tempType.elementAt(i)).shortValue();
        }
    }
}
// NOTE(review): appears to be a machine-annotated duplicate of the preceding snippet; the
// trailing "// depends on control dependency" markers are dataset labels — kept verbatim.
public class class_name { private void fetchColumns() { Vector temp = new Vector(20); Vector tempType = new Vector(20); try { if (cConn == null) { return; // depends on control dependency: [if], data = [none] } if (dbmeta == null) { dbmeta = cConn.getMetaData(); // depends on control dependency: [if], data = [none] } ResultSet colList = dbmeta.getColumns(null, null, tableName, "%"); while (colList.next()) { temp.addElement(colList.getString("COLUMN_NAME")); // depends on control dependency: [while], data = [none] tempType.addElement(new Short(colList.getShort("DATA_TYPE"))); // depends on control dependency: [while], data = [none] } colList.close(); // depends on control dependency: [try], data = [none] } catch (SQLException e) { ZaurusEditor.printStatus("SQL Exception: " + e.getMessage()); } // depends on control dependency: [catch], data = [none] columns = new String[temp.size()]; temp.copyInto(columns); columnTypes = new short[temp.size()]; for (int i = 0; i < columnTypes.length; i++) { columnTypes[i] = ((Short) tempType.elementAt(i)).shortValue(); // depends on control dependency: [for], data = [i] } } }
public class class_name {
    /**
     * Resolves the MIME type for the extension of the given URI or path.
     *
     * @param uriOrPath the URI or file path whose extension is examined
     * @return the MIME type registered for the extension, or
     *         {@code DEFAULT_MIME} when none is mapped
     */
    static String _getMimeType(String uriOrPath) {
        final String mapped = MIME_MAP.get(__getExtension(uriOrPath));
        // Unknown extension (or extension mapped to null) falls back to the default.
        return (mapped != null) ? mapped : DEFAULT_MIME;
    }
}
// NOTE(review): appears to be a machine-annotated duplicate of the preceding snippet; the
// trailing "// depends on control dependency" marker is a dataset label — kept verbatim.
public class class_name { static String _getMimeType(String uriOrPath) { String mime = MIME_MAP.get(__getExtension(uriOrPath)); if (mime == null) { mime = DEFAULT_MIME; // depends on control dependency: [if], data = [none] } return mime; } }
public class class_name { private Map<String, Object> getAllReflectedValues() { Map<String, Object> res = new LinkedHashMap<>(); for (Map.Entry<MethodSymbol, Attribute> entry : getAllValues().entrySet()) { MethodSymbol meth = entry.getKey(); Object value = generateValue(meth, entry.getValue()); if (value != null) { res.put(meth.name.toString(), value); } else { // Ignore this element. May (properly) lead to // IncompleteAnnotationException somewhere down the line. } } return res; } }
// NOTE(review): appears to be a machine-annotated duplicate of the preceding snippet; the
// trailing "// depends on control dependency" marker is a dataset label — kept verbatim.
public class class_name { private Map<String, Object> getAllReflectedValues() { Map<String, Object> res = new LinkedHashMap<>(); for (Map.Entry<MethodSymbol, Attribute> entry : getAllValues().entrySet()) { MethodSymbol meth = entry.getKey(); Object value = generateValue(meth, entry.getValue()); if (value != null) { res.put(meth.name.toString(), value); // depends on control dependency: [if], data = [none] } else { // Ignore this element. May (properly) lead to // IncompleteAnnotationException somewhere down the line. } } return res; } }
public class class_name {
    /**
     * Tells whether the given message is "last", i.e. a FullHttpMessage with
     * no trailing headers and no readable content.
     *
     * @param httpMessage the message to inspect
     * @return {@code true} only for a full message that carries nothing more
     */
    private static boolean isLast(HttpMessage httpMessage) {
        if (!(httpMessage instanceof FullHttpMessage)) {
            // Partial messages are never last by this definition.
            return false;
        }
        final FullHttpMessage full = (FullHttpMessage) httpMessage;
        return full.trailingHeaders().isEmpty() && !full.content().isReadable();
    }
}
// NOTE(review): appears to be a machine-annotated duplicate of the preceding snippet; the
// trailing "// depends on control dependency" marker is a dataset label — kept verbatim.
public class class_name { private static boolean isLast(HttpMessage httpMessage) { if (httpMessage instanceof FullHttpMessage) { FullHttpMessage fullMessage = (FullHttpMessage) httpMessage; if (fullMessage.trailingHeaders().isEmpty() && !fullMessage.content().isReadable()) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name {
    /**
     * Brings the binary representation back in sync with the field objects by
     * re-serializing every field from the first modified position onward,
     * then rewriting the header and swapping the serialization buffers.
     *
     * <p>No-op when nothing was modified ({@code firstModifiedPos ==
     * Integer.MAX_VALUE}, which also serves as the "clean" sentinel restored
     * at the end). NOTE(review): assumes {@code offsets}/{@code lengths} are
     * consistent with {@code binaryData} for all fields before
     * {@code firstModifiedPos} — TODO confirm against the writers of those
     * fields elsewhere in this class.
     */
    public void updateBinaryRepresenation() {
        // check whether the binary state is in sync
        final int firstModified = this.firstModifiedPos;
        if (firstModified == Integer.MAX_VALUE) {
            return;
        }

        final InternalDeSerializer serializer = this.serializer;
        final int[] offsets = this.offsets;
        final int numFields = this.numFields;

        // Reuse the spare buffer if one exists; otherwise size from the known
        // binary length, or estimate from the field count.
        serializer.memory = this.switchBuffer != null ? this.switchBuffer :
            (this.binaryLen > 0 ? new byte[this.binaryLen] :
                new byte[numFields * DEFAULT_FIELD_LEN_ESTIMATE + 1]);
        serializer.position = 0;

        if (numFields > 0) {
            int offset = 0;

            // search backwards to find the latest preceding non-null field
            if (firstModified > 0) {
                for (int i = firstModified - 1; i >= 0; i--) {
                    if (this.offsets[i] != NULL_INDICATOR_OFFSET) {
                        offset = this.offsets[i] + this.lengths[i];
                        break;
                    }
                }
            }

            // we assume that changed and unchanged fields are interleaved and
            // serialize into another array
            try {
                if (offset > 0) {
                    // copy the first unchanged portion as one
                    serializer.write(this.binaryData, 0, offset);
                }
                // copy field by field
                for (int i = firstModified; i < numFields; i++) {
                    final int co = offsets[i];
                    /// skip null fields
                    if (co == NULL_INDICATOR_OFFSET) {
                        continue;
                    }

                    offsets[i] = offset;

                    if (co == MODIFIED_INDICATOR_OFFSET) {
                        final Value writeField = this.writeFields[i];
                        if (writeField == RESERVE_SPACE) {
                            // RESERVE_SPACE is a placeholder indicating
                            // lengths[i] bytes should be reserved
                            final int length = this.lengths[i];
                            if (serializer.position >= serializer.memory.length - length - 1) {
                                serializer.resize(length);
                            }
                            serializer.position += length;
                        } else {
                            // serialize modified fields
                            this.writeFields[i].write(serializer);
                        }
                    } else {
                        // bin-copy unmodified fields
                        serializer.write(this.binaryData, co, this.lengths[i]);
                    }

                    // Record the field's new length and advance past it.
                    this.lengths[i] = serializer.position - offset;
                    offset = serializer.position;
                }
            } catch (Exception e) {
                throw new RuntimeException("Error in data type serialization: " + e.getMessage(), e);
            }
        }

        serializeHeader(serializer, offsets, numFields);

        // set the fields: keep the old buffer as the spare, adopt the freshly
        // written one, and mark the record clean again.
        this.switchBuffer = this.binaryData;
        this.binaryData = serializer.memory;
        this.binaryLen = serializer.position;
        this.firstModifiedPos = Integer.MAX_VALUE;
    }
}
// NOTE(review): appears to be a machine-annotated duplicate of the preceding snippet; the
// trailing "// depends on control dependency" markers are dataset labels — kept verbatim,
// including the original split of the snippet across two physical lines.
public class class_name { public void updateBinaryRepresenation() { // check whether the binary state is in sync final int firstModified = this.firstModifiedPos; if (firstModified == Integer.MAX_VALUE) { return; // depends on control dependency: [if], data = [none] } final InternalDeSerializer serializer = this.serializer; final int[] offsets = this.offsets; final int numFields = this.numFields; serializer.memory = this.switchBuffer != null ? this.switchBuffer : (this.binaryLen > 0 ? new byte[this.binaryLen] : new byte[numFields * DEFAULT_FIELD_LEN_ESTIMATE + 1]); serializer.position = 0; if (numFields > 0) { int offset = 0; // search backwards to find the latest preceding non-null field if (firstModified > 0) { for (int i = firstModified - 1; i >= 0; i--) { if (this.offsets[i] != NULL_INDICATOR_OFFSET) { offset = this.offsets[i] + this.lengths[i]; // depends on control dependency: [if], data = [none] break; } } } // we assume that changed and unchanged fields are interleaved and serialize into another array try { if (offset > 0) { // copy the first unchanged portion as one serializer.write(this.binaryData, 0, offset); // depends on control dependency: [if], data = [none] } // copy field by field for (int i = firstModified; i < numFields; i++) { final int co = offsets[i]; /// skip null fields if (co == NULL_INDICATOR_OFFSET) { continue; } offsets[i] = offset; // depends on control dependency: [for], data = [i] if (co == MODIFIED_INDICATOR_OFFSET) { final Value writeField = this.writeFields[i]; if (writeField == RESERVE_SPACE) { // RESERVE_SPACE is a placeholder indicating lengths[i] bytes should be reserved final int length = this.lengths[i]; if (serializer.position >= serializer.memory.length - length - 1) { serializer.resize(length); // depends on control dependency: [if], data = [none] } serializer.position += length; // depends on control dependency: [if], data = [none] } else { // serialize modified fields this.writeFields[i].write(serializer); // depends on 
control dependency: [if], data = [none] } } else { // bin-copy unmodified fields serializer.write(this.binaryData, co, this.lengths[i]); // depends on control dependency: [if], data = [none] } this.lengths[i] = serializer.position - offset; // depends on control dependency: [for], data = [i] offset = serializer.position; // depends on control dependency: [for], data = [none] } } catch (Exception e) { throw new RuntimeException("Error in data type serialization: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } serializeHeader(serializer, offsets, numFields); // set the fields this.switchBuffer = this.binaryData; this.binaryData = serializer.memory; this.binaryLen = serializer.position; this.firstModifiedPos = Integer.MAX_VALUE; } }