code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
|---|---|
public class class_name {
/**
 * Computes the SHA-256 digest of the region data[offset .. offset+len).
 *
 * @param data   source buffer
 * @param offset index of the first byte to include
 * @param len    number of bytes to include
 * @return the 32-byte SHA-256 digest
 * @throws RuntimeException if the JVM does not provide SHA-256 (required by
 *         the Java platform spec, so effectively unreachable)
 */
public static byte[] hash(byte[] data, int offset, int len) {
    final MessageDigest sha256;
    try {
        sha256 = MessageDigest.getInstance("SHA-256");
    } catch (NoSuchAlgorithmException e) {
        throw new RuntimeException(e);
    }
    sha256.update(data, offset, len);
    return sha256.digest();
} }
|
public class class_name {
// Annotated copy of hash(): SHA-256 over data[offset, offset+len). The trailing
// markers record per-statement control/data dependencies and must be preserved.
public static byte[] hash(byte[] data, int offset, int len) {
try {
MessageDigest a = MessageDigest.getInstance("SHA-256");
a.update(data, offset, len); // depends on control dependency: [try], data = [none]
return a.digest(); // depends on control dependency: [try], data = [none]
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Copies fill and stroke settings from a ShapeStyle onto a VML element,
 * creating the nested fill/stroke child elements on first use.
 * A null fill or stroke color disables the corresponding feature.
 */
private static void applyShapeStyle(Element element, ShapeStyle style) {
// First check the presence of the fill and stroke elements:
NodeList<Element> fills = element.getElementsByTagName("fill");
if (fills.getLength() == 0) {
// Lazily create both children, then re-query so "fills" points at the new node.
Element stroke = Dom.createElementNS(Dom.NS_VML, "stroke");
element.appendChild(stroke);
Element fill = Dom.createElementNS(Dom.NS_VML, "fill");
element.appendChild(fill);
fills = element.getElementsByTagName("fill");
}
// Then if fill-color then filled=true:
if (style.getFillColor() != null) {
element.setAttribute("filled", "true");
fills.getItem(0).setAttribute("color", style.getFillColor());
fills.getItem(0).setAttribute("opacity", Float.toString(style.getFillOpacity()));
} else {
element.setAttribute("filled", "false");
}
// Then if stroke-color then stroke=true:
// NOTE(review): assumes a <stroke> child exists whenever a <fill> child does —
// if a fill element is present without a stroke, getItem(0) below would fail; confirm.
if (style.getStrokeColor() != null) {
element.setAttribute("stroked", "true");
NodeList<Element> strokes = element.getElementsByTagName("stroke");
strokes.getItem(0).setAttribute("color", style.getStrokeColor());
strokes.getItem(0).setAttribute("opacity", Float.toString(style.getStrokeOpacity()));
element.setAttribute("strokeweight", Float.toString(style.getStrokeWidth()));
} else {
element.setAttribute("stroked", "false");
}
} }
|
public class class_name {
// Annotated copy of applyShapeStyle(): applies ShapeStyle fill/stroke to a VML
// element. The trailing markers record control/data dependencies and must be preserved.
private static void applyShapeStyle(Element element, ShapeStyle style) {
// First check the presence of the fill and stroke elements:
NodeList<Element> fills = element.getElementsByTagName("fill");
if (fills.getLength() == 0) {
Element stroke = Dom.createElementNS(Dom.NS_VML, "stroke");
element.appendChild(stroke); // depends on control dependency: [if], data = [none]
Element fill = Dom.createElementNS(Dom.NS_VML, "fill");
element.appendChild(fill); // depends on control dependency: [if], data = [none]
fills = element.getElementsByTagName("fill"); // depends on control dependency: [if], data = [none]
}
// Then if fill-color then filled=true:
if (style.getFillColor() != null) {
element.setAttribute("filled", "true"); // depends on control dependency: [if], data = [none]
fills.getItem(0).setAttribute("color", style.getFillColor()); // depends on control dependency: [if], data = [none]
fills.getItem(0).setAttribute("opacity", Float.toString(style.getFillOpacity())); // depends on control dependency: [if], data = [none]
} else {
element.setAttribute("filled", "false"); // depends on control dependency: [if], data = [none]
}
// Then if stroke-color then stroke=true:
if (style.getStrokeColor() != null) {
element.setAttribute("stroked", "true"); // depends on control dependency: [if], data = [none]
NodeList<Element> strokes = element.getElementsByTagName("stroke");
strokes.getItem(0).setAttribute("color", style.getStrokeColor()); // depends on control dependency: [if], data = [none]
strokes.getItem(0).setAttribute("opacity", Float.toString(style.getStrokeOpacity())); // depends on control dependency: [if], data = [none]
element.setAttribute("strokeweight", Float.toString(style.getStrokeWidth())); // depends on control dependency: [if], data = [none]
} else {
element.setAttribute("stroked", "false"); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Reads the manifest row for a channel.
 * When {@code weak} is true and an oldest-slab value is cached, the read starts
 * at that slab with LOCAL_ONE consistency; otherwise the full row is read with
 * LOCAL_QUORUM and the cache is refreshed from the result.
 */
private Iterator<Column<ByteBuffer>> readManifestForChannel(final String channel, final boolean weak) {
// Weak reads may reuse the cached oldest slab; strong reads always scan from the row start.
final ByteBuffer oldestSlab = weak ? _oldestSlab.getIfPresent(channel) : null;
final ConsistencyLevel consistency;
RangeBuilder range = new RangeBuilder().setLimit(50);
if (oldestSlab != null) {
range.setStart(oldestSlab);
consistency = ConsistencyLevel.CL_LOCAL_ONE;
} else {
consistency = ConsistencyLevel.CL_LOCAL_QUORUM;
}
final Iterator<Column<ByteBuffer>> manifestColumns = executePaginated(
_keyspace.prepareQuery(ColumnFamilies.MANIFEST, consistency)
.getKey(channel)
.withColumnRange(range.build())
.autoPaginate(true));
if (oldestSlab != null) {
// Query was executed weakly using the cached oldest slab, so don't update the cache with an unreliable oldest value
return manifestColumns;
} else {
PeekingIterator<Column<ByteBuffer>> peekingManifestColumns = Iterators.peekingIterator(manifestColumns);
if (peekingManifestColumns.hasNext()) {
// Cache the first slab returned from querying the full manifest column family since it is the oldest.
cacheOldestSlabForChannel(channel, TimeUUIDSerializer.get().fromByteBuffer(peekingManifestColumns.peek().getName()));
return peekingManifestColumns;
} else {
// Channel was completely empty. Cache a TimeUUID for the current time. This will cause future calls
// to read at most 1 minute of tombstones until the cache expires 10 seconds later.
cacheOldestSlabForChannel(channel, TimeUUIDs.newUUID());
return Iterators.emptyIterator();
}
}
} }
|
public class class_name {
// Annotated copy of readManifestForChannel(): reads a channel's manifest row with
// weak/strong consistency. The trailing dependency markers must be preserved.
private Iterator<Column<ByteBuffer>> readManifestForChannel(final String channel, final boolean weak) {
final ByteBuffer oldestSlab = weak ? _oldestSlab.getIfPresent(channel) : null;
final ConsistencyLevel consistency;
RangeBuilder range = new RangeBuilder().setLimit(50);
if (oldestSlab != null) {
range.setStart(oldestSlab); // depends on control dependency: [if], data = [(oldestSlab]
consistency = ConsistencyLevel.CL_LOCAL_ONE; // depends on control dependency: [if], data = [none]
} else {
consistency = ConsistencyLevel.CL_LOCAL_QUORUM; // depends on control dependency: [if], data = [none]
}
final Iterator<Column<ByteBuffer>> manifestColumns = executePaginated(
_keyspace.prepareQuery(ColumnFamilies.MANIFEST, consistency)
.getKey(channel)
.withColumnRange(range.build())
.autoPaginate(true));
if (oldestSlab != null) {
// Query was executed weakly using the cached oldest slab, so don't update the cache with an unreliable oldest value
return manifestColumns; // depends on control dependency: [if], data = [none]
} else {
PeekingIterator<Column<ByteBuffer>> peekingManifestColumns = Iterators.peekingIterator(manifestColumns);
if (peekingManifestColumns.hasNext()) {
// Cache the first slab returned from querying the full manifest column family since it is the oldest.
cacheOldestSlabForChannel(channel, TimeUUIDSerializer.get().fromByteBuffer(peekingManifestColumns.peek().getName())); // depends on control dependency: [if], data = [none]
return peekingManifestColumns; // depends on control dependency: [if], data = [none]
} else {
// Channel was completely empty. Cache a TimeUUID for the current time. This will cause future calls
// to read at most 1 minute of tombstones until the cache expires 10 seconds later.
cacheOldestSlabForChannel(channel, TimeUUIDs.newUUID()); // depends on control dependency: [if], data = [none]
return Iterators.emptyIterator(); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
/**
 * Returns the in-order predecessor of {@code node}, or NIL if none exists.
 */
public final int prev(int node) {
    final int leftChild = left(node);
    if (leftChild != NIL) {
        // With a left subtree, the predecessor is its rightmost node.
        return last(leftChild);
    }
    // Otherwise climb until we leave a right subtree (or fall off the root).
    int ancestor = parent(node);
    while (ancestor != NIL && node == left(ancestor)) {
        node = ancestor;
        ancestor = parent(ancestor);
    }
    return ancestor;
} }
|
public class class_name {
// Annotated copy of prev(): in-order predecessor in an int-indexed binary tree.
// The trailing dependency markers must be preserved.
public final int prev(int node) {
final int left = left(node);
if (left != NIL) {
return last(left); // depends on control dependency: [if], data = [(left]
} else {
int parent = parent(node);
while (parent != NIL && node == left(parent)) {
node = parent; // depends on control dependency: [while], data = [none]
parent = parent(parent); // depends on control dependency: [while], data = [none]
}
return parent; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Lazily resolves and caches the EClass at classifier index 722 of the Ifc4
 * EPackage (assumed to be IfcTimePeriod per the generated index — confirm).
 * NOTE(review): unsynchronized lazy init; benign only if concurrent
 * re-resolution is acceptable — confirm the threading model.
 */
@Override
public EClass getIfcTimePeriod() {
if (ifcTimePeriodEClass == null) {
ifcTimePeriodEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(722);
}
return ifcTimePeriodEClass;
} }
|
public class class_name {
// Annotated copy of getIfcTimePeriod(): lazy lookup of classifier #722 from the
// Ifc4 EPackage. The trailing dependency marker must be preserved.
@Override
public EClass getIfcTimePeriod() {
if (ifcTimePeriodEClass == null) {
ifcTimePeriodEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(722);
// depends on control dependency: [if], data = [none]
}
return ifcTimePeriodEClass;
} }
|
public class class_name {
/**
 * Marshals a Category's id and name via the given ProtocolMarshaller.
 *
 * @throws SdkClientException if category is null or marshalling fails
 *         (original cause is preserved in the wrapped exception)
 */
public void marshall(Category category, ProtocolMarshaller protocolMarshaller) {
if (category == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(category.getCategoryId(), CATEGORYID_BINDING);
protocolMarshaller.marshall(category.getCategoryName(), CATEGORYNAME_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
// Annotated copy of marshall(): writes Category fields through the protocol
// marshaller. The trailing dependency markers must be preserved.
public void marshall(Category category, ProtocolMarshaller protocolMarshaller) {
if (category == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(category.getCategoryId(), CATEGORYID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(category.getCategoryName(), CATEGORYNAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Loads the basic CSS properties (display/float/position, line-height,
 * whitespace, background, z-index, transform) from the element's computed
 * style into this box's fields, applying the CSS 2.1 combination rules.
 */
protected void loadBasicStyle()
{
ctx.updateForGraphics(style, g);
// Missing properties fall back to their CSS initial values.
display = style.getProperty("display");
if (display == null) display = CSSProperty.Display.INLINE;
CSSProperty.Float floating = style.getProperty("float");
if (floating == null) floating = BlockBox.FLOAT_NONE;
position = style.getProperty("position");
if (position == null) position = BlockBox.POS_STATIC;
//apply combination rules
//http://www.w3.org/TR/CSS21/visuren.html#dis-pos-flo
if (display == ElementBox.DISPLAY_NONE)
{
position = BlockBox.POS_STATIC;
floating = BlockBox.FLOAT_NONE;
}
else if (position == BlockBox.POS_ABSOLUTE || position == BlockBox.POS_FIXED)
{
floating = BlockBox.FLOAT_NONE;
}
//compute the display computed value
// Floated, absolutely positioned, and root boxes get a blockified display.
if (floating != BlockBox.FLOAT_NONE || position == BlockBox.POS_ABSOLUTE || position == BlockBox.POS_FIXED || isRootElement())
{
if (display == DISPLAY_INLINE_TABLE)
display = DISPLAY_TABLE;
else if (display == DISPLAY_INLINE ||
display == DISPLAY_RUN_IN ||
display == DISPLAY_TABLE_ROW_GROUP ||
display == DISPLAY_TABLE_COLUMN ||
display == DISPLAY_TABLE_COLUMN_GROUP ||
display == DISPLAY_TABLE_HEADER_GROUP ||
display == DISPLAY_TABLE_FOOTER_GROUP ||
display == DISPLAY_TABLE_ROW ||
display == DISPLAY_TABLE_CELL ||
display == DISPLAY_TABLE_CAPTION ||
display == DISPLAY_INLINE_BLOCK)
display = DISPLAY_BLOCK;
}
isblock = (display == DISPLAY_BLOCK);
displayed = (display != DISPLAY_NONE && display != DISPLAY_TABLE_COLUMN);
visible = (style.getProperty("visibility") != CSSProperty.Visibility.HIDDEN);
//line height
CSSProperty.LineHeight lh = style.getProperty("line-height");
if (lh == null || lh == CSSProperty.LineHeight.NORMAL)
lineHeight = Math.round(DEFAULT_LINE_HEIGHT * ctx.getFontHeight());
else if (lh == CSSProperty.LineHeight.length)
{
TermLength len = style.getValue(TermLength.class, "line-height");
lineHeight = (int) ctx.pxLength(len);
}
else if (lh == CSSProperty.LineHeight.percentage)
{
// Percentage line-height is resolved against the font height.
TermPercent len = style.getValue(TermPercent.class, "line-height");
lineHeight = (int) ctx.pxLength(len, ctx.getFontHeight());
}
else //must be INTEGER or NUMBER
{
Term<?> len = style.getValue("line-height", true);
float r;
if (len instanceof TermInteger)
r = ((TermInteger) len).getValue();
else
r = ((TermNumber) len).getValue();
lineHeight = Math.round(r * ctx.getFontHeight());
}
//whitespace
whitespace = style.getProperty("white-space");
if (whitespace == null) whitespace = WHITESPACE_NORMAL;
//background
loadBackground();
//z-index
// zset records whether an explicit integer z-index applies to this box.
CSSProperty.ZIndex z = style.getProperty("z-index");
if (z != null && z != ZIndex.AUTO)
{
zset = true;
Term<?> zterm = style.getValue("z-index", true);
if (zterm instanceof TermInteger)
zIndex = ((TermInteger) zterm).getValue().intValue();
else
zset = false;
}
else
zset = false;
//transformations -- applied on block-level or atomic inline-level elements only
// NOTE(review): when the box is neither block nor replaced, "transform" keeps its
// prior field value and is only defaulted if still null — confirm this is intended.
if (isBlock() || isReplaced())
transform = style.getProperty("transform");
if (transform == null) transform = CSSProperty.Transform.NONE;
} }
|
public class class_name {
// Annotated copy of loadBasicStyle(): loads basic CSS properties into box fields.
// The trailing dependency markers must be preserved.
protected void loadBasicStyle()
{
ctx.updateForGraphics(style, g);
display = style.getProperty("display");
if (display == null) display = CSSProperty.Display.INLINE;
CSSProperty.Float floating = style.getProperty("float");
if (floating == null) floating = BlockBox.FLOAT_NONE;
position = style.getProperty("position");
if (position == null) position = BlockBox.POS_STATIC;
//apply combination rules
//http://www.w3.org/TR/CSS21/visuren.html#dis-pos-flo
if (display == ElementBox.DISPLAY_NONE)
{
position = BlockBox.POS_STATIC; // depends on control dependency: [if], data = [none]
floating = BlockBox.FLOAT_NONE; // depends on control dependency: [if], data = [none]
}
else if (position == BlockBox.POS_ABSOLUTE || position == BlockBox.POS_FIXED)
{
floating = BlockBox.FLOAT_NONE; // depends on control dependency: [if], data = [none]
}
//compute the display computed value
if (floating != BlockBox.FLOAT_NONE || position == BlockBox.POS_ABSOLUTE || position == BlockBox.POS_FIXED || isRootElement())
{
if (display == DISPLAY_INLINE_TABLE)
display = DISPLAY_TABLE;
else if (display == DISPLAY_INLINE ||
display == DISPLAY_RUN_IN ||
display == DISPLAY_TABLE_ROW_GROUP ||
display == DISPLAY_TABLE_COLUMN ||
display == DISPLAY_TABLE_COLUMN_GROUP ||
display == DISPLAY_TABLE_HEADER_GROUP ||
display == DISPLAY_TABLE_FOOTER_GROUP ||
display == DISPLAY_TABLE_ROW ||
display == DISPLAY_TABLE_CELL ||
display == DISPLAY_TABLE_CAPTION ||
display == DISPLAY_INLINE_BLOCK)
display = DISPLAY_BLOCK;
}
isblock = (display == DISPLAY_BLOCK);
displayed = (display != DISPLAY_NONE && display != DISPLAY_TABLE_COLUMN);
visible = (style.getProperty("visibility") != CSSProperty.Visibility.HIDDEN);
//line height
CSSProperty.LineHeight lh = style.getProperty("line-height");
if (lh == null || lh == CSSProperty.LineHeight.NORMAL)
lineHeight = Math.round(DEFAULT_LINE_HEIGHT * ctx.getFontHeight());
else if (lh == CSSProperty.LineHeight.length)
{
TermLength len = style.getValue(TermLength.class, "line-height");
lineHeight = (int) ctx.pxLength(len); // depends on control dependency: [if], data = [none]
}
else if (lh == CSSProperty.LineHeight.percentage)
{
TermPercent len = style.getValue(TermPercent.class, "line-height");
lineHeight = (int) ctx.pxLength(len, ctx.getFontHeight()); // depends on control dependency: [if], data = [none]
}
else //must be INTEGER or NUMBER
{
Term<?> len = style.getValue("line-height", true);
float r;
if (len instanceof TermInteger)
r = ((TermInteger) len).getValue();
else
r = ((TermNumber) len).getValue();
lineHeight = Math.round(r * ctx.getFontHeight()); // depends on control dependency: [if], data = [none]
}
//whitespace
whitespace = style.getProperty("white-space");
if (whitespace == null) whitespace = WHITESPACE_NORMAL;
//background
loadBackground();
//z-index
CSSProperty.ZIndex z = style.getProperty("z-index");
if (z != null && z != ZIndex.AUTO)
{
zset = true; // depends on control dependency: [if], data = [none]
Term<?> zterm = style.getValue("z-index", true);
if (zterm instanceof TermInteger)
zIndex = ((TermInteger) zterm).getValue().intValue();
else
zset = false;
}
else
zset = false;
//transformations -- applied on block-level or atomic inline-level elements only
if (isBlock() || isReplaced())
transform = style.getProperty("transform");
if (transform == null) transform = CSSProperty.Transform.NONE;
} }
|
public class class_name {
/**
 * Reserves {@code size} bytes in the backing array and returns the starting
 * index of the reserved region.
 *
 * <p>Fix: the original grew the array by a single {@code blockSize} step, so a
 * request with {@code size > blockSize} could leave {@code n} past the end of
 * the array. The capacity is now grown in blockSize steps until the request fits.
 *
 * @param size number of bytes to reserve (assumed non-negative — confirm callers)
 * @return index of the first reserved byte
 */
public int alloc(int size) {
    final int index = n;
    final int required = n + size;
    if (required >= array.length) {
        // Grow in blockSize increments until the reservation fits.
        int newLen = array.length;
        while (required >= newLen) {
            newLen += blockSize;
        }
        byte[] grown = new byte[newLen];
        System.arraycopy(array, 0, grown, 0, array.length);
        array = grown;
    }
    n = required;
    return index;
} }
|
public class class_name {
// Annotated copy of alloc(): bump-allocates size bytes in a growable byte array.
// The trailing dependency markers must be preserved.
public int alloc(int size) {
int index = n;
int len = array.length;
if (n + size >= len) {
byte[] aux = new byte[len + blockSize];
System.arraycopy(array, 0, aux, 0, len); // depends on control dependency: [if], data = [none]
array = aux; // depends on control dependency: [if], data = [none]
}
n += size;
return index;
} }
|
public class class_name {
/**
 * Collects every materialized view in the catalog whose source table is
 * {@code table} (matched by relative index).
 *
 * @param database catalog database to scan
 * @param table    candidate source table
 * @return the (possibly empty) list of views materialized from the table
 */
public static List<Table> getMaterializeViews(org.voltdb.catalog.Database database,
org.voltdb.catalog.Table table)
{
    final int targetIndex = table.getRelativeIndex();
    final ArrayList<Table> views = new ArrayList<>();
    for (Table candidate : database.getTables()) {
        final Table source = candidate.getMaterializer();
        if (source != null && source.getRelativeIndex() == targetIndex) {
            views.add(candidate);
        }
    }
    return views;
} }
|
public class class_name {
// Annotated copy of getMaterializeViews(): finds views materialized from a table.
// The trailing dependency markers must be preserved.
public static List<Table> getMaterializeViews(org.voltdb.catalog.Database database,
org.voltdb.catalog.Table table)
{
ArrayList<Table> tlist = new ArrayList<>();
CatalogMap<Table> tables = database.getTables();
for (Table t : tables) {
Table matsrc = t.getMaterializer();
if ((matsrc != null) && (matsrc.getRelativeIndex() == table.getRelativeIndex())) {
tlist.add(t); // depends on control dependency: [if], data = [none]
}
}
return tlist;
} }
|
public class class_name {
/**
 * Adapts LakeBTC trade responses into an XChange UserTrades object.
 *
 * NOTE(review): lastTradeId is never updated from the parsed trades and is
 * always reported as 0 — confirm whether callers rely on a real last trade id.
 */
public static UserTrades adaptTradeHistory(LakeBTCTradeResponse[] transactions) {
List<UserTrade> trades = new ArrayList<>();
long lastTradeId = 0;
for (LakeBTCTradeResponse trade : transactions) {
// Types beginning with "buy" are bids; everything else is treated as an ask.
final OrderType orderType = trade.getType().startsWith("buy") ? OrderType.BID : OrderType.ASK;
BigDecimal originalAmount = trade.getAmount();
BigDecimal price = trade.getTotal().abs();
// getAt() is multiplied by 1000 before fromMillisUtc — presumably epoch seconds; confirm.
Date timestamp = DateUtils.fromMillisUtc(trade.getAt() * 1000L);
final String tradeId = trade.getId();
final CurrencyPair currencyPair = CurrencyPair.BTC_CNY;
UserTrade userTrade =
new UserTrade(
orderType,
originalAmount,
currencyPair,
price,
timestamp,
tradeId,
null,
null,
Currency.getInstance(currencyPair.counter.getCurrencyCode()));
trades.add(userTrade);
}
return new UserTrades(trades, lastTradeId, Trades.TradeSortType.SortByTimestamp);
} }
|
public class class_name {
// Annotated copy of adaptTradeHistory(): converts LakeBTC trades to UserTrades.
// The trailing dependency markers must be preserved.
public static UserTrades adaptTradeHistory(LakeBTCTradeResponse[] transactions) {
List<UserTrade> trades = new ArrayList<>();
long lastTradeId = 0;
for (LakeBTCTradeResponse trade : transactions) {
final OrderType orderType = trade.getType().startsWith("buy") ? OrderType.BID : OrderType.ASK;
BigDecimal originalAmount = trade.getAmount();
BigDecimal price = trade.getTotal().abs();
Date timestamp = DateUtils.fromMillisUtc(trade.getAt() * 1000L);
final String tradeId = trade.getId();
final CurrencyPair currencyPair = CurrencyPair.BTC_CNY;
UserTrade userTrade =
new UserTrade(
orderType,
originalAmount,
currencyPair,
price,
timestamp,
tradeId,
null,
null,
Currency.getInstance(currencyPair.counter.getCurrencyCode()));
trades.add(userTrade); // depends on control dependency: [for], data = [trade]
}
return new UserTrades(trades, lastTradeId, Trades.TradeSortType.SortByTimestamp);
} }
|
public class class_name {
/**
 * Walks the class-file tree under {@code rootFolder} (iterative DFS), invokes
 * the callback for every class whose descriptor reports a main method, and
 * returns the first non-null callback result — or null when the folder is
 * missing or no callback produced a result.
 *
 * @throws IllegalArgumentException if rootFolder exists but is not a directory
 * @throws IOException on class-file read failure
 */
static <T> T doWithMainClasses(File rootFolder, MainClassCallback<T> callback)
throws IOException {
if (!rootFolder.exists()) {
return null; // nothing to do
}
if (!rootFolder.isDirectory()) {
throw new IllegalArgumentException(
"Invalid root folder '" + rootFolder + "'");
}
// Prefix stripped from absolute paths when deriving dotted class names.
String prefix = rootFolder.getAbsolutePath() + "/";
Deque<File> stack = new ArrayDeque<>();
stack.push(rootFolder);
while (!stack.isEmpty()) {
File file = stack.pop();
if (file.isFile()) {
try (InputStream inputStream = new FileInputStream(file)) {
ClassDescriptor classDescriptor = createClassDescriptor(inputStream);
if (classDescriptor != null && classDescriptor.isMainMethodFound()) {
String className = convertToClassName(file.getAbsolutePath(),
prefix);
T result = callback.doWith(new MainClass(className,
classDescriptor.getAnnotationNames()));
if (result != null) {
// First non-null callback result short-circuits the scan.
return result;
}
}
}
}
if (file.isDirectory()) {
pushAllSorted(stack, file.listFiles(PACKAGE_FOLDER_FILTER));
pushAllSorted(stack, file.listFiles(CLASS_FILE_FILTER));
}
}
return null;
} }
|
public class class_name {
// Annotated copy of doWithMainClasses(): DFS over class files, invoking the
// callback for main classes. The trailing dependency markers must be preserved.
static <T> T doWithMainClasses(File rootFolder, MainClassCallback<T> callback)
throws IOException {
if (!rootFolder.exists()) {
return null; // nothing to do
}
if (!rootFolder.isDirectory()) {
throw new IllegalArgumentException(
"Invalid root folder '" + rootFolder + "'");
}
String prefix = rootFolder.getAbsolutePath() + "/";
Deque<File> stack = new ArrayDeque<>();
stack.push(rootFolder);
while (!stack.isEmpty()) {
File file = stack.pop();
if (file.isFile()) {
try (InputStream inputStream = new FileInputStream(file)) {
ClassDescriptor classDescriptor = createClassDescriptor(inputStream);
if (classDescriptor != null && classDescriptor.isMainMethodFound()) {
String className = convertToClassName(file.getAbsolutePath(),
prefix);
T result = callback.doWith(new MainClass(className,
classDescriptor.getAnnotationNames()));
if (result != null) {
return result; // depends on control dependency: [if], data = [none]
}
}
}
}
if (file.isDirectory()) {
pushAllSorted(stack, file.listFiles(PACKAGE_FOLDER_FILTER));
pushAllSorted(stack, file.listFiles(CLASS_FILE_FILTER));
}
}
return null;
} }
|
public class class_name {
/**
 * Requests fresh data from the remote: reuses an in-flight sync when one
 * exists (reviving its task if it was cancelled), otherwise cancels any stale
 * task and starts a new sync. Returns a cancelled future on initialization failure.
 */
@Override
public Future<M> requestData() {
logger.debug(this + " requestData...");
try {
validateInitialization();
// All syncFuture/syncTask access happens under syncMonitor.
synchronized (syncMonitor) {
// Check if sync is in process.
if (syncFuture != null && !syncFuture.isDone()) {
// Recover sync task if it was canceled for instance during remote reinitialization.
if (syncTask == null || syncTask.isDone()) {
syncTask = sync();
}
return syncFuture;
} else {
// cleanup old sync task
if (syncTask != null && !syncTask.isDone()) {
syncTask.cancel(true);
}
}
// Create new sync process
syncFuture = new CompletableFutureLite();
syncTask = sync();
return syncFuture;
}
} catch (CouldNotPerformException ex) {
return FutureProcessor.canceledFuture(new CouldNotPerformException("Could not request data!", ex));
}
} }
|
public class class_name {
// Annotated copy of requestData(): reuses or (re)starts a remote sync under
// syncMonitor. The trailing dependency markers must be preserved.
@Override
public Future<M> requestData() {
logger.debug(this + " requestData...");
try {
validateInitialization(); // depends on control dependency: [try], data = [none]
synchronized (syncMonitor) { // depends on control dependency: [try], data = [none]
// Check if sync is in process.
if (syncFuture != null && !syncFuture.isDone()) {
// Recover sync task if it was canceled for instance during remote reinitialization.
if (syncTask == null || syncTask.isDone()) {
syncTask = sync(); // depends on control dependency: [if], data = [none]
}
return syncFuture; // depends on control dependency: [if], data = [none]
} else {
// cleanup old sync task
if (syncTask != null && !syncTask.isDone()) {
syncTask.cancel(true); // depends on control dependency: [if], data = [none]
}
}
// Create new sync process
syncFuture = new CompletableFutureLite();
syncTask = sync();
return syncFuture;
}
} catch (CouldNotPerformException ex) {
return FutureProcessor.canceledFuture(new CouldNotPerformException("Could not request data!", ex));
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
/**
 * Assigns a new id value to the model element when the type declares a
 * String-typed id attribute; otherwise does nothing.
 *
 * @param type                 the element type whose id attribute is looked up
 * @param modelElementInstance the instance whose id is replaced
 * @param newId                the new identifier value
 * @param withReferenceUpdate  whether referring elements should be updated as well
 */
public static void setNewIdentifier(ModelElementType type, ModelElementInstance modelElementInstance,
String newId, boolean withReferenceUpdate) {
    Attribute<?> id = type.getAttribute(ID_ATTRIBUTE_NAME);
    // instanceof already yields false for null (JLS 15.20.2), so the original
    // explicit null check was redundant and has been dropped.
    if (id instanceof StringAttribute && id.isIdAttribute()) {
        ((StringAttribute) id).setValue(modelElementInstance, newId, withReferenceUpdate);
    }
} }
|
public class class_name {
// Annotated copy of setNewIdentifier(): conditionally replaces the String id
// attribute of a model element. The trailing dependency marker must be preserved.
public static void setNewIdentifier(ModelElementType type, ModelElementInstance modelElementInstance,
String newId, boolean withReferenceUpdate) {
Attribute<?> id = type.getAttribute(ID_ATTRIBUTE_NAME);
if (id != null && id instanceof StringAttribute && id.isIdAttribute()) {
((StringAttribute) id).setValue(modelElementInstance, newId, withReferenceUpdate); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Partitions the (optionally filtered) locations into weighted InputSplits.
 * With no weights — or a first weight of exactly 1.0 — a single split covering
 * every location is returned; otherwise locations are divided proportionally
 * to the normalized weights.
 *
 * @param pathFilter optional filter applied to the locations; may be null
 * @param weights    relative split weights; may be null or empty
 * @return one InputSplit per weight, or a single all-locations split
 */
public InputSplit[] sample(PathFilter pathFilter, double... weights) {
    final URI[] locations = (pathFilter == null) ? locations() : pathFilter.filter(locations());
    if (weights == null || weights.length == 0 || weights[0] == 1.0) {
        return new InputSplit[] {new CollectionInputSplit(Arrays.asList(locations))};
    }
    double totalWeight = 0;
    for (double w : weights) {
        totalWeight += w;
    }
    // boundaries[i] .. boundaries[i+1] delimits the locations of split i.
    final int[] boundaries = new int[weights.length + 1];
    double runningWeight = 0;
    for (int i = 0; i < weights.length; i++) {
        boundaries[i] = (int) Math.round(runningWeight * locations.length / totalWeight);
        runningWeight += weights[i];
    }
    boundaries[weights.length] = locations.length;
    final InputSplit[] splits = new InputSplit[weights.length];
    for (int i = 0; i < weights.length; i++) {
        List<URI> group = new ArrayList<>();
        for (int j = boundaries[i]; j < boundaries[i + 1]; j++) {
            group.add(locations[j]);
        }
        splits[i] = new CollectionInputSplit(group);
    }
    return splits;
} }
|
public class class_name {
// Annotated copy of sample(): partitions locations into weighted InputSplits.
// The trailing dependency markers must be preserved.
public InputSplit[] sample(PathFilter pathFilter, double... weights) {
URI[] paths = pathFilter != null ? pathFilter.filter(locations()) : locations();
if (weights != null && weights.length > 0 && weights[0] != 1.0) {
InputSplit[] splits = new InputSplit[weights.length];
double totalWeight = 0;
for (int i = 0; i < weights.length; i++) {
totalWeight += weights[i]; // depends on control dependency: [for], data = [i]
}
double cumulWeight = 0;
int[] partitions = new int[weights.length + 1];
for (int i = 0; i < weights.length; i++) {
partitions[i] = (int) Math.round(cumulWeight * paths.length / totalWeight); // depends on control dependency: [for], data = [i]
cumulWeight += weights[i]; // depends on control dependency: [for], data = [i]
}
partitions[weights.length] = paths.length; // depends on control dependency: [if], data = [none]
for (int i = 0; i < weights.length; i++) {
List<URI> uris = new ArrayList<>();
for (int j = partitions[i]; j < partitions[i + 1]; j++) {
uris.add(paths[j]); // depends on control dependency: [for], data = [j]
}
splits[i] = new CollectionInputSplit(uris); // depends on control dependency: [for], data = [i]
}
return splits; // depends on control dependency: [if], data = [none]
} else {
return new InputSplit[] {new CollectionInputSplit(Arrays.asList(paths))}; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
/**
 * Clears the high-order bits of {@code v} beyond bit position {@code len},
 * in place, and returns the same array.
 *
 * @param v   bit vector stored as longs (modified in place)
 * @param len number of low-order bits to keep
 * @return {@code v}, with all bits at index >= len zeroed
 */
public static long[] truncateI(long[] v, int len) {
    // Total number of high-order bits to clear.
    final int excess = (v.length * Long.SIZE) - len;
    final int wholeWords = excess >>> LONG_LOG2_SIZE;
    final int partialBits = excess & LONG_LOG2_MASK;
    // Zero the fully-cleared trailing words, then mask the boundary word.
    Arrays.fill(v, v.length - wholeWords, v.length, 0L);
    if (partialBits > 0) {
        v[v.length - wholeWords - 1] &= LONG_ALL_BITS >>> partialBits;
    }
    return v;
} }
|
public class class_name {
// Annotated copy of truncateI(): zeroes bits at index >= len in a long-word bit
// vector. The trailing dependency marker must be preserved.
public static long[] truncateI(long[] v, int len) {
final int zap = (v.length * Long.SIZE) - len;
final int zapWords = (zap >>> LONG_LOG2_SIZE);
final int zapbits = zap & LONG_LOG2_MASK;
Arrays.fill(v, v.length - zapWords, v.length, 0);
if(zapbits > 0) {
v[v.length - zapWords - 1] &= (LONG_ALL_BITS >>> zapbits); // depends on control dependency: [if], data = [none]
}
return v;
} }
|
public class class_name {
/**
 * Randomly splits a temporal data model into a training model (splits[0]) and
 * a test model (splits[1]).
 * perUser=true shuffles each user's data and cuts at percentageTraining;
 * perUser=false draws an independent random number per record. doSplitPerItems
 * decides whether a user-item pair (with all its timestamps) is kept together
 * or each timestamp is assigned independently.
 *
 * NOTE(review): the cut uses {@code i > splitPoint}, so the item at index
 * splitPoint itself lands in training (training gets splitPoint+1 items) —
 * confirm whether {@code >=} was intended.
 */
@Override
public TemporalDataModelIF<U, I>[] split(final TemporalDataModelIF<U, I> data) {
@SuppressWarnings("unchecked")
final TemporalDataModelIF<U, I>[] splits = new TemporalDataModelIF[2];
splits[0] = new TemporalDataModel<>(); // training
splits[1] = new TemporalDataModel<>(); // test
if (perUser) {
for (U user : data.getUsers()) {
if (doSplitPerItems) {
// Shuffle the user's items and cut the list at the training percentage.
List<I> items = new ArrayList<>();
data.getUserItems(user).forEach(items::add);
Collections.shuffle(items, rnd);
int splitPoint = Math.round(percentageTraining * items.size());
for (int i = 0; i < items.size(); i++) {
I item = items.get(i);
Double pref = data.getUserItemPreference(user, item);
Iterable<Long> time = data.getUserItemTimestamps(user, item);
TemporalDataModelIF<U, I> datamodel = splits[0]; // training
if (i > splitPoint) {
datamodel = splits[1]; // test
}
if (pref != null) {
datamodel.addPreference(user, item, pref);
}
if (time != null) {
for (Long t : time) {
datamodel.addTimestamp(user, item, t);
}
}
}
} else {
// Expand to (item, timestamp) pairs so each event is split independently.
List<Pair<I, Long>> itemsTime = new ArrayList<>();
for (I i : data.getUserItems(user)) {
for (Long t : data.getUserItemTimestamps(user, i)) {
itemsTime.add(new Pair<>(i, t));
}
}
Collections.shuffle(itemsTime, rnd);
int splitPoint = Math.round(percentageTraining * itemsTime.size());
for (int i = 0; i < itemsTime.size(); i++) {
Pair<I, Long> it = itemsTime.get(i);
I item = it.getFirst();
Long time = it.getSecond();
Double pref = data.getUserItemPreference(user, item);
TemporalDataModelIF<U, I> datamodel = splits[0]; // training
if (i > splitPoint) {
datamodel = splits[1]; // test
}
if (pref != null) {
datamodel.addPreference(user, item, pref);
}
if (time != null) {
datamodel.addTimestamp(user, item, time);
}
}
}
}
} else {
// Global (non-per-user) split: each record draws its own random number.
for (U user : data.getUsers()) {
for (I item : data.getUserItems(user)) {
Double pref = data.getUserItemPreference(user, item);
Iterable<Long> time = data.getUserItemTimestamps(user, item);
if (doSplitPerItems) {
TemporalDataModelIF<U, I> datamodel = splits[0]; // training
if (rnd.nextDouble() > percentageTraining) {
datamodel = splits[1]; // test
}
if (pref != null) {
datamodel.addPreference(user, item, pref);
}
if (time != null) {
for (Long t : time) {
datamodel.addTimestamp(user, item, t);
}
}
} else if (time != null) {
// Per-timestamp split: the preference may be added once per timestamp.
for (Long t : time) {
TemporalDataModelIF<U, I> datamodel = splits[0]; // training
if (rnd.nextDouble() > percentageTraining) {
datamodel = splits[1]; // test
}
if (pref != null) {
datamodel.addPreference(user, item, pref);
}
datamodel.addTimestamp(user, item, t);
}
} else {
TemporalDataModelIF<U, I> datamodel = splits[0]; // training
if (rnd.nextDouble() > percentageTraining) {
datamodel = splits[1]; // test
}
if (pref != null) {
datamodel.addPreference(user, item, pref);
}
}
}
}
}
return splits;
} }
|
public class class_name {
    /**
     * Randomly splits {@code data} into a training model (index 0) and a test
     * model (index 1), assigning roughly {@code percentageTraining} of the data
     * to training. The strategy is chosen by two flags:
     * <ul>
     *   <li>{@code perUser}: split each user's data independently (shuffled with
     *       {@code rnd}) instead of drawing a global random number per datum;</li>
     *   <li>{@code doSplitPerItems}: keep all ratings/timestamps of an item
     *       together instead of splitting individual (item, timestamp) pairs.</li>
     * </ul>
     *
     * @param data the temporal data model to split; it is only read, never modified
     * @return a two-element array: [0] = training model, [1] = test model
     */
    @Override
    public TemporalDataModelIF<U, I>[] split(final TemporalDataModelIF<U, I> data) {
        @SuppressWarnings("unchecked")
        final TemporalDataModelIF<U, I>[] splits = new TemporalDataModelIF[2];
        splits[0] = new TemporalDataModel<>(); // training
        splits[1] = new TemporalDataModel<>(); // test
        if (perUser) {
            for (U user : data.getUsers()) {
                if (doSplitPerItems) {
                    List<I> items = new ArrayList<>();
                    data.getUserItems(user).forEach(items::add); // depends on control dependency: [if], data = [none]
                    Collections.shuffle(items, rnd); // depends on control dependency: [if], data = [none]
                    // Items with shuffled index <= splitPoint go to training, the rest to test.
                    int splitPoint = Math.round(percentageTraining * items.size());
                    for (int i = 0; i < items.size(); i++) {
                        I item = items.get(i);
                        Double pref = data.getUserItemPreference(user, item);
                        Iterable<Long> time = data.getUserItemTimestamps(user, item);
                        TemporalDataModelIF<U, I> datamodel = splits[0]; // training
                        if (i > splitPoint) {
                            datamodel = splits[1]; // test // depends on control dependency: [if], data = [none]
                        }
                        if (pref != null) {
                            datamodel.addPreference(user, item, pref); // depends on control dependency: [if], data = [none]
                        }
                        if (time != null) {
                            for (Long t : time) {
                                datamodel.addTimestamp(user, item, t); // depends on control dependency: [for], data = [t]
                            }
                        }
                    }
                } else {
                    // Split at the granularity of individual (item, timestamp) pairs.
                    List<Pair<I, Long>> itemsTime = new ArrayList<>();
                    for (I i : data.getUserItems(user)) {
                        for (Long t : data.getUserItemTimestamps(user, i)) {
                            itemsTime.add(new Pair<>(i, t)); // depends on control dependency: [for], data = [t]
                        }
                    }
                    Collections.shuffle(itemsTime, rnd); // depends on control dependency: [if], data = [none]
                    int splitPoint = Math.round(percentageTraining * itemsTime.size());
                    for (int i = 0; i < itemsTime.size(); i++) {
                        Pair<I, Long> it = itemsTime.get(i);
                        I item = it.getFirst();
                        Long time = it.getSecond();
                        Double pref = data.getUserItemPreference(user, item);
                        TemporalDataModelIF<U, I> datamodel = splits[0]; // training
                        if (i > splitPoint) {
                            datamodel = splits[1]; // test // depends on control dependency: [if], data = [none]
                        }
                        if (pref != null) {
                            datamodel.addPreference(user, item, pref); // depends on control dependency: [if], data = [none]
                        }
                        if (time != null) {
                            datamodel.addTimestamp(user, item, time); // depends on control dependency: [if], data = [none]
                        }
                    }
                }
            }
        } else {
            // Global split: each datum is routed independently by a fresh random draw.
            for (U user : data.getUsers()) {
                for (I item : data.getUserItems(user)) {
                    Double pref = data.getUserItemPreference(user, item);
                    Iterable<Long> time = data.getUserItemTimestamps(user, item);
                    if (doSplitPerItems) {
                        TemporalDataModelIF<U, I> datamodel = splits[0]; // training
                        if (rnd.nextDouble() > percentageTraining) {
                            datamodel = splits[1]; // test // depends on control dependency: [if], data = [none]
                        }
                        if (pref != null) {
                            datamodel.addPreference(user, item, pref); // depends on control dependency: [if], data = [none]
                        }
                        if (time != null) {
                            for (Long t : time) {
                                datamodel.addTimestamp(user, item, t); // depends on control dependency: [for], data = [t]
                            }
                        }
                    } else if (time != null) {
                        // Each timestamp of the same (user, item) may land in a different split.
                        for (Long t : time) {
                            TemporalDataModelIF<U, I> datamodel = splits[0]; // training
                            if (rnd.nextDouble() > percentageTraining) {
                                datamodel = splits[1]; // test // depends on control dependency: [if], data = [none]
                            }
                            if (pref != null) {
                                datamodel.addPreference(user, item, pref); // depends on control dependency: [if], data = [none]
                            }
                            datamodel.addTimestamp(user, item, t); // depends on control dependency: [for], data = [t]
                        }
                    } else {
                        // No timestamps at all: route the bare preference.
                        TemporalDataModelIF<U, I> datamodel = splits[0]; // training
                        if (rnd.nextDouble() > percentageTraining) {
                            datamodel = splits[1]; // test // depends on control dependency: [if], data = [none]
                        }
                        if (pref != null) {
                            datamodel.addPreference(user, item, pref); // depends on control dependency: [if], data = [none]
                        }
                    }
                }
            }
        }
        return splits;
    } }
|
public class class_name {
    /**
     * Base64-encodes {@code src} using the 64-entry alphabet in {@code table}.
     * When {@code pad} is non-zero the output is padded with it to a multiple
     * of four characters; when {@code pad} is 0 the output is left unpadded.
     *
     * @param src   raw bytes to encode
     * @param table 64-character encoding alphabet
     * @param pad   padding character, or 0 to disable padding
     * @return the encoded characters (empty array for empty input)
     */
    public static char[] encode(byte[] src, char[] table, char pad) {
        final int len = src.length;
        if (len == 0) {
            return new char[0];
        }
        final int fullGroups = len / 3;
        final int remainder = len - fullGroups * 3;
        // Four output chars per (possibly partial) 3-byte group.
        int outLen = ((len - 1) / 3 + 1) << 2;
        if (pad == 0 && remainder > 0) {
            outLen -= 3 - remainder; // unpadded: drop the chars padding would occupy
        }
        final char[] out = new char[outLen];
        int in = 0;
        int op = 0;
        // Encode the complete 3-byte groups: 24 bits -> four 6-bit indices.
        for (int g = 0; g < fullGroups; g++) {
            final int bits = (src[in++] & 0xff) << 16 | (src[in++] & 0xff) << 8 | (src[in++] & 0xff);
            out[op++] = table[(bits >>> 18) & 0x3f];
            out[op++] = table[(bits >>> 12) & 0x3f];
            out[op++] = table[(bits >>> 6) & 0x3f];
            out[op++] = table[bits & 0x3f];
        }
        // Encode the trailing 1- or 2-byte group, then apply padding if requested.
        if (remainder > 0) {
            int bits = (src[in] & 0xff) << 10;
            if (remainder == 2) {
                bits |= (src[in + 1] & 0xff) << 2;
            }
            out[op++] = table[(bits >>> 12) & 0x3f];
            out[op++] = table[(bits >>> 6) & 0x3f];
            if (remainder == 2) {
                out[op++] = table[bits & 0x3f];
            }
            if (pad != 0) {
                if (remainder == 1) {
                    out[op++] = pad;
                }
                out[op] = pad;
            }
        }
        return out;
    } }
|
public class class_name {
    /**
     * Base64-encodes {@code src} using the 64-character alphabet {@code table}.
     * A non-zero {@code pad} character pads the output to a multiple of four
     * chars; {@code pad == 0} produces unpadded output.
     *
     * @param src   raw bytes to encode
     * @param table 64-entry encoding alphabet
     * @param pad   padding character, or 0 for no padding
     * @return encoded characters (empty array for empty input)
     */
    public static char[] encode(byte[] src, char[] table, char pad) {
        int len = src.length;
        if (len == 0) return new char[0];
        // Number of input bytes covered by complete 3-byte groups.
        int blocks = (len / 3) * 3;
        // Output length: 4 chars per (possibly partial) 3-byte group.
        int chars = ((len - 1) / 3 + 1) << 2;
        int tail = len - blocks;
        // Without padding, the trailing group emits fewer characters.
        if (pad == 0 && tail > 0) chars -= 3 - tail;
        char[] dst = new char[chars];
        int si = 0, di = 0;
        while (si < blocks) {
            // Pack three bytes into 24 bits and emit four 6-bit indices.
            int n = (src[si++] & 0xff) << 16 | (src[si++] & 0xff) << 8 | (src[si++] & 0xff);
            dst[di++] = table[(n >>> 18) & 0x3f]; // depends on control dependency: [while], data = [none]
            dst[di++] = table[(n >>> 12) & 0x3f]; // depends on control dependency: [while], data = [none]
            dst[di++] = table[(n >>> 6) & 0x3f]; // depends on control dependency: [while], data = [none]
            dst[di++] = table[n & 0x3f]; // depends on control dependency: [while], data = [none]
        }
        // Handle the trailing 1- or 2-byte group and optional padding.
        if (tail > 0) {
            int n = (src[si] & 0xff) << 10;
            if (tail == 2) n |= (src[++si] & 0xff) << 2;
            dst[di++] = table[(n >>> 12) & 0x3f]; // depends on control dependency: [if], data = [none]
            dst[di++] = table[(n >>> 6) & 0x3f]; // depends on control dependency: [if], data = [none]
            if (tail == 2) dst[di++] = table[n & 0x3f];
            if (pad != 0) {
                if (tail == 1) dst[di++] = pad;
                dst[di] = pad; // depends on control dependency: [if], data = [none]
            }
        }
        return dst;
    } }
|
public class class_name {
    /**
     * Lazily resolves and caches the EEnum for {@code IfcConnectionTypeEnum}
     * from the registered Ifc4 package (classifier index 941).
     *
     * @return the cached EEnum instance
     */
    @Override
    public EEnum getIfcConnectionTypeEnum() {
        if (ifcConnectionTypeEnumEEnum != null) {
            return ifcConnectionTypeEnumEEnum;
        }
        final EPackage ifc4Package = EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI);
        ifcConnectionTypeEnumEEnum = (EEnum) ifc4Package.getEClassifiers().get(941);
        return ifcConnectionTypeEnumEEnum;
    } }
|
public class class_name {
    /**
     * Lazily resolves and caches the {@code IfcConnectionTypeEnum} EEnum from
     * the registered Ifc4 package; classifier index 941 is this enum's fixed
     * position in the generated package.
     *
     * @return the cached EEnum instance
     */
    @Override
    public EEnum getIfcConnectionTypeEnum() {
        if (ifcConnectionTypeEnumEEnum == null) {
            ifcConnectionTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                    .getEClassifiers().get(941);
            // depends on control dependency: [if], data = [none]
        }
        return ifcConnectionTypeEnumEEnum;
    } }
|
public class class_name {
    /**
     * Notifies every registered {@link ITransferContextServiceExt} that stored
     * work state is about to be processed, passing along the saved context data
     * ({@code storeStateCtxData}). Plain {@link ITransferContextService}
     * implementations are skipped.
     */
    protected void notifyITransferContextPreProcessWorkState() {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINEST)) {
            // Fixed: entry/exit trace previously reported the unrelated method
            // name "notifyITransferContextPostStoreState" (copy/paste error).
            logger.entering(CLASS_NAME,"notifyITransferContextPreProcessWorkState", this);
        }
        Iterator<ITransferContextService> TransferIterator = com.ibm.ws.webcontainer.osgi.WebContainer.getITransferContextServices();
        if (TransferIterator != null) {
            while(TransferIterator.hasNext()){
                ITransferContextService tcs = TransferIterator.next();
                if (tcs instanceof ITransferContextServiceExt) {
                    if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINEST)) {
                        // Fixed: trace named the wrong callee ("postStoreState").
                        logger.logp(Level.FINEST, CLASS_NAME, "notifyITransferContextPreProcessWorkState", "calling preProcessWorkState on: " + tcs);
                    }
                    ((ITransferContextServiceExt)tcs).preProcessWorkState(storeStateCtxData);
                }
            }
        }
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINEST)) {
            logger.exiting(CLASS_NAME,"notifyITransferContextPreProcessWorkState", this);
        }
    } }
|
public class class_name {
    /**
     * Notifies every registered {@link ITransferContextServiceExt} that stored
     * work state is about to be processed, passing along the saved context data
     * ({@code storeStateCtxData}). Plain {@link ITransferContextService}
     * implementations are skipped.
     */
    protected void notifyITransferContextPreProcessWorkState() {
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINEST)) {
            // Fixed: entry/exit trace previously reported the unrelated method
            // name "notifyITransferContextPostStoreState" (copy/paste error).
            logger.entering(CLASS_NAME,"notifyITransferContextPreProcessWorkState", this);
        }
        Iterator<ITransferContextService> TransferIterator = com.ibm.ws.webcontainer.osgi.WebContainer.getITransferContextServices();
        if (TransferIterator != null) {
            while(TransferIterator.hasNext()){
                ITransferContextService tcs = TransferIterator.next();
                if (tcs instanceof ITransferContextServiceExt) {
                    if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && logger.isLoggable(Level.FINEST)) {
                        // Fixed: trace named the wrong callee ("postStoreState").
                        logger.logp(Level.FINEST, CLASS_NAME, "notifyITransferContextPreProcessWorkState", "calling preProcessWorkState on: " + tcs);
                    }
                    ((ITransferContextServiceExt)tcs).preProcessWorkState(storeStateCtxData);
                }
            }
        }
        if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINEST)) {
            logger.exiting(CLASS_NAME,"notifyITransferContextPreProcessWorkState", this);
        }
    } }
|
public class class_name {
    /**
     * Returns the inline tags of this documentation element, parsing them from
     * the exception comment on first access and caching the result.
     *
     * @return the cached array of inline tags
     */
    @Override
    public Tag[] inlineTags() {
        if (inlineTags != null) {
            return inlineTags;
        }
        inlineTags = Comment.getInlineTags(holder, exceptionComment());
        return inlineTags;
    } }
|
public class class_name {
    /**
     * Returns the inline tags of this documentation element, lazily parsed
     * from the exception comment and cached for subsequent calls.
     *
     * @return the cached array of inline tags
     */
    @Override
    public Tag[] inlineTags() {
        if (inlineTags == null) {
            inlineTags = Comment.getInlineTags(holder, exceptionComment()); // depends on control dependency: [if], data = [none]
        }
        return inlineTags;
    } }
|
public class class_name {
    /**
     * Queues a notification for all registered media observers. Does nothing
     * when there are no observers; logs a warning and drops the notification
     * when no manager is available to dispatch it.
     *
     * @param amop the observer operation to apply to each observer
     */
    public void queueNotification (ObserverList.ObserverOp<Object> amop)
    {
        if (_observers == null) {
            return; // nobody is listening
        }
        if (_mgr == null) {
            log.warning("Have no manager, dropping notification", "media", this, "op", amop);
        } else {
            _mgr.queueNotification(_observers, amop);
        }
    } }
|
public class class_name {
    /**
     * Queues a notification for the registered media observers via the manager.
     * No-op when there are no observers; logs a warning and drops the
     * notification when there is no manager to dispatch it.
     *
     * @param amop the observer operation to apply to each observer
     */
    public void queueNotification (ObserverList.ObserverOp<Object> amop)
    {
        if (_observers != null) {
            if (_mgr != null) {
                _mgr.queueNotification(_observers, amop); // depends on control dependency: [if], data = [none]
            } else {
                log.warning("Have no manager, dropping notification", "media", this, "op", amop); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Resolves {@code rel} against {@code base}, tolerating malformed URIs.
     * A null/empty base yields {@code rel}; a null/empty {@code rel} yields
     * {@code base}; when both parse cleanly standard URI resolution is used.
     * When either side is malformed, an absolute-looking {@code rel} (parsed
     * absolute, or starting with "http:") is returned as-is, and otherwise the
     * two strings are simply concatenated as a best effort.
     *
     * @param base the base URI string, possibly malformed
     * @param rel  the relative URI string, possibly malformed
     * @return the resolved (or best-effort combined) URI string
     */
    static private String resolve(String base, String rel) {
        // Unspecified base or relative URI: return the other side unchanged.
        if (base == null || base.isEmpty()) return rel;
        if (rel == null || rel.isEmpty()) return base;
        final URI parsedBase = parseQuietly(base);
        final URI parsedRel = parseQuietly(rel);
        // Happy path: both sides are valid URIs.
        if (parsedBase != null && parsedRel != null) {
            return parsedBase.resolve(parsedRel).toString();
        }
        // At least one side is malformed. An absolute relative URI (or one that
        // merely looks absolute) wins outright.
        if ((parsedRel != null && parsedRel.isAbsolute()) || rel.startsWith("http:")) {
            return rel;
        }
        // Last resort: plain concatenation and hope for the best.
        return base + rel;
    }

    /** Parses the string as a URI, returning null instead of throwing on bad syntax. */
    static private URI parseQuietly(String candidate) {
        try {
            return new URI(candidate);
        } catch (URISyntaxException ignored) {
            return null;
        }
    } }
|
public class class_name {
    /**
     * Resolves {@code rel} against {@code base}, tolerating malformed URIs:
     * null/empty base returns rel, null/empty rel returns base; when both parse,
     * standard URI resolution applies; when either fails to parse, an
     * absolute-looking rel (parsed absolute or "http:"-prefixed) is returned
     * as-is, otherwise the strings are concatenated as a best effort.
     *
     * @param base the base URI string, possibly malformed
     * @param rel  the relative URI string, possibly malformed
     * @return the resolved (or best-effort combined) URI string
     */
    static private String resolve(String base, String rel) {
        // Short-circuit check for unspecified base or relative URI.
        if (base == null || base.isEmpty()) return rel;
        if (rel == null || rel.isEmpty()) return base;
        // Attempt to parse both strings as URIs.
        URI b = null;
        try {
            b = new URI(base); // depends on control dependency: [try], data = [none]
        } catch (URISyntaxException IGNORE) {
            // ignore, we'll handle this later.
        } // depends on control dependency: [catch], data = [none]
        URI r = null;
        try {
            r = new URI(rel); // depends on control dependency: [try], data = [none]
        } catch (URISyntaxException IGNORE) {
            // ignore, we'll handle this later.
        } // depends on control dependency: [catch], data = [none]
        // Both URIs parsed OK, proceed as usual.
        if (b != null && r != null) return b.resolve(r).toString();
        // Syntax exception in the base or relative URI, need to fudge here...
        // Is the URI being resolved absolute (or does it look like it)?
        if ((r != null && r.isAbsolute()) || rel.startsWith("http:")) return rel;
        // One or both of the strings didn't parse as a valid URI so URI.resolve won't help us here.
        // Just smash them together and hope for the best.
        return base + rel;
    } }
|
public class class_name {
    /**
     * Finds the principal investigator among the development proposal's
     * persons. When several persons are flagged as PI, the last one in
     * iteration order wins (matching the original behavior).
     *
     * @param pdDoc the proposal development document; may be null
     * @return the PI, or null when {@code pdDoc} is null or no person is flagged
     */
    @Override
    public ProposalPersonContract getPrincipalInvestigator(ProposalDevelopmentDocumentContract pdDoc) {
        if (pdDoc == null) {
            return null;
        }
        ProposalPersonContract principalInvestigator = null;
        for (ProposalPersonContract candidate : pdDoc.getDevelopmentProposal().getProposalPersons()) {
            if (candidate.isPrincipalInvestigator()) {
                principalInvestigator = candidate; // last match wins
            }
        }
        return principalInvestigator;
    } }
|
public class class_name {
    /**
     * Returns the principal investigator of the development proposal, or null
     * when {@code pdDoc} is null or no person is flagged as PI. When several
     * persons are flagged, the last one in iteration order is returned.
     *
     * @param pdDoc the proposal development document; may be null
     * @return the PI, or null when none is found
     */
    @Override
    public ProposalPersonContract getPrincipalInvestigator(ProposalDevelopmentDocumentContract pdDoc) {
        ProposalPersonContract proposalPerson = null;
        if (pdDoc != null) {
            for (ProposalPersonContract person : pdDoc.getDevelopmentProposal().getProposalPersons()) {
                if (person.isPrincipalInvestigator()) {
                    proposalPerson = person; // depends on control dependency: [if], data = [none]
                }
            }
        }
        return proposalPerson;
    } }
|
public class class_name {
    /**
     * Populates this field definition from a UNode parsed out of a schema
     * document. Recognized child attributes: type, collection, analyzer,
     * inverse, table, fields (nested group fields, parsed recursively),
     * sharded, encoding and junction. {@code Utils.require} throws on any
     * malformed, duplicate, or unrecognized attribute. A missing 'type'
     * defaults to TEXT, and {@code verify()} runs cross-attribute validation
     * at the end.
     *
     * @param fieldNode non-null node whose name becomes this field's name
     */
    public void parse(UNode fieldNode) {
        assert fieldNode != null;
        // Set field name.
        setName(fieldNode.getName());
        // Parse the nodes child nodes. If we find a "fields" definition, just save it
        // for later.
        for (String childName : fieldNode.getMemberNames()) {
            // See if we recognize it.
            UNode childNode = fieldNode.getMember(childName);
            // "type"
            if (childName.equals("type")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'type' must be a string: " + childNode);
                Utils.require(m_type == null,
                              "'type' can only be specified once");
                m_type = FieldType.fromString(childNode.getValue());
                Utils.require(m_type != null,
                              "Unrecognized field 'type': " + childNode.getValue());
            // "collection"
            } else if (childName.equals("collection")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'collection' must be a string: " + childNode);
                m_bIsCollection = Utils.getBooleanValue(childNode.getValue());
            // "analyzer"
            } else if (childName.equals("analyzer")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'analyzer' must be a string: " + childNode);
                Utils.require(m_analyzerName == null,
                              "'analyzer' can only be specified once");
                m_analyzerName = childNode.getValue();
            // "inverse"
            } else if (childName.equals("inverse")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'inverse' must be a string: " + childNode);
                Utils.require(m_linkInverse == null,
                              "'inverse' can only be specified once");
                m_linkInverse = childNode.getValue();
            // "table"
            } else if (childName.equals("table")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'table' must be a string: " + childNode);
                Utils.require(m_linkExtent == null,
                              "'table' can only be specified once");
                m_linkExtent = childNode.getValue();
            // "fields"
            } else if (childName.equals("fields")) {
                // This field must be (or can become) a group.
                Utils.require(m_type == null || m_type == FieldType.GROUP,
                              "Only group fields can have nested elements: " + m_name);
                m_type = FieldType.GROUP;
                // Value can only be specified once.
                Utils.require(m_nestedFieldMap.size() == 0,
                              "'fields' can only be specified once: " + m_name);
                Utils.require(childNode.hasMembers(),
                              "Group field must have at least one nested field defined: " + m_name);
                for (String nestedFieldName : childNode.getMemberNames()) {
                    // Create a FieldDefinition for the nested field and parse details into it.
                    UNode nestedFieldNode = childNode.getMember(nestedFieldName);
                    FieldDefinition nestedField = new FieldDefinition();
                    nestedField.parse(nestedFieldNode);
                    addNestedField(nestedField);
                }
            // "sharded"
            } else if (childName.equals("sharded")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'sharded' must be a string: " + childNode);
                m_bIsSharded = Utils.getBooleanValue(childNode.getValue());
            // "encoding"
            } else if (childName.equals("encoding")) {
                Utils.require(childNode.isValue(),
                              "Value of 'encoding' must be a string: " + childNode);
                EncodingType encoding = EncodingType.fromString(childNode.getValue());
                Utils.require(encoding != null, "Unrecognized 'encoding': " + childNode.getValue());
                setEncoding(encoding);
            // "junction"
            } else if (childName.equals("junction")) {
                Utils.require(childNode.isValue(), "Value of 'junction' must be a string: " + childNode);
                m_junctionField = childNode.getValue();
            // Unrecognized.
            } else {
                Utils.require(false, "Unrecognized field attribute: " + childName);
            }
        }
        // If we didn't get a 'type', default to "text".
        if (m_type == null) {
            m_type = FieldType.TEXT;
        }
        verify();
    } }
|
public class class_name {
    /**
     * Populates this field definition from a UNode parsed out of a schema
     * document. Recognized child attributes: type, collection, analyzer,
     * inverse, table, fields (nested group fields, parsed recursively),
     * sharded, encoding and junction. {@code Utils.require} throws on any
     * malformed, duplicate, or unrecognized attribute. A missing 'type'
     * defaults to TEXT, and {@code verify()} runs cross-attribute validation
     * at the end.
     *
     * @param fieldNode non-null node whose name becomes this field's name
     */
    public void parse(UNode fieldNode) {
        assert fieldNode != null;
        // Set field name.
        setName(fieldNode.getName());
        // Parse the nodes child nodes. If we find a "fields" definition, just save it
        // for later.
        for (String childName : fieldNode.getMemberNames()) {
            // See if we recognize it.
            UNode childNode = fieldNode.getMember(childName);
            // "type"
            if (childName.equals("type")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'type' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                Utils.require(m_type == null,
                              "'type' can only be specified once");
                m_type = FieldType.fromString(childNode.getValue());
                Utils.require(m_type != null,
                              "Unrecognized field 'type': " + childNode.getValue());
                // depends on control dependency: [if], data = [none]
            // "collection"
            } else if (childName.equals("collection")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'collection' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                m_bIsCollection = Utils.getBooleanValue(childNode.getValue());
                // depends on control dependency: [if], data = [none]
            // "analyzer"
            } else if (childName.equals("analyzer")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'analyzer' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                Utils.require(m_analyzerName == null,
                              "'analyzer' can only be specified once");
                m_analyzerName = childNode.getValue();
            // "inverse"
            } else if (childName.equals("inverse")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'inverse' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                Utils.require(m_linkInverse == null,
                              "'inverse' can only be specified once");
                m_linkInverse = childNode.getValue();
            // "table"
            } else if (childName.equals("table")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'table' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                Utils.require(m_linkExtent == null,
                              "'table' can only be specified once");
                m_linkExtent = childNode.getValue();
            // "fields"
            } else if (childName.equals("fields")) {
                // This field must be (or can become) a group.
                Utils.require(m_type == null || m_type == FieldType.GROUP,
                              "Only group fields can have nested elements: " + m_name);
                m_type = FieldType.GROUP;
                // Value can only be specified once.
                Utils.require(m_nestedFieldMap.size() == 0,
                              "'fields' can only be specified once: " + m_name);
                // depends on control dependency: [if], data = [none]
                Utils.require(childNode.hasMembers(),
                              "Group field must have at least one nested field defined: " + m_name);
                // depends on control dependency: [if], data = [none]
                for (String nestedFieldName : childNode.getMemberNames()) {
                    // Create a FieldDefinition for the nested field and parse details into it.
                    UNode nestedFieldNode = childNode.getMember(nestedFieldName);
                    FieldDefinition nestedField = new FieldDefinition();
                    nestedField.parse(nestedFieldNode);
                    // depends on control dependency: [for], data = [none]
                    addNestedField(nestedField);
                    // depends on control dependency: [for], data = [none]
                }
            // "sharded"
            } else if (childName.equals("sharded")) {
                // Value must be a string.
                Utils.require(childNode.isValue(),
                              "Value of 'sharded' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                m_bIsSharded = Utils.getBooleanValue(childNode.getValue());
                // depends on control dependency: [if], data = [none]
            // "encoding"
            } else if (childName.equals("encoding")) {
                Utils.require(childNode.isValue(),
                              "Value of 'encoding' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                EncodingType encoding = EncodingType.fromString(childNode.getValue());
                Utils.require(encoding != null, "Unrecognized 'encoding': " + childNode.getValue());
                // depends on control dependency: [if], data = [none]
                setEncoding(encoding);
                // depends on control dependency: [if], data = [none]
            // "junction"
            } else if (childName.equals("junction")) {
                Utils.require(childNode.isValue(), "Value of 'junction' must be a string: " + childNode);
                // depends on control dependency: [if], data = [none]
                m_junctionField = childNode.getValue();
                // depends on control dependency: [if], data = [none]
            // Unrecognized.
            } else {
                Utils.require(false, "Unrecognized field attribute: " + childName);
                // depends on control dependency: [if], data = [none]
            }
        }
        // If we didn't get a 'type', default to "text".
        if (m_type == null) {
            m_type = FieldType.TEXT;
            // depends on control dependency: [if], data = [none]
        }
        verify();
    } }
|
public class class_name {
    /**
     * Sets the expire-after-access duration. A zero duration makes entries
     * immediately evictable, so the null-removal cause defaults to EXPIRED
     * when no other cause has been set yet, and the custom map implementation
     * is enabled.
     *
     * @param duration time since last access after which entries may be evicted
     * @param unit the unit of {@code duration}
     * @return this builder, for chaining
     */
    @Deprecated
    @GwtIncompatible("To be supported")
    @Override
    MapMaker expireAfterAccess(long duration, TimeUnit unit) {
        checkExpiration(duration, unit);
        this.expireAfterAccessNanos = unit.toNanos(duration);
        if (duration == 0 && this.nullRemovalCause == null) {
            // SIZE trumps EXPIRED
            this.nullRemovalCause = RemovalCause.EXPIRED;
        }
        useCustomMap = true;
        return this;
    } }
|
public class class_name {
    /**
     * Sets the expire-after-access duration. A zero duration makes entries
     * immediately evictable, so the null-removal cause defaults to EXPIRED
     * when no other cause has been set yet, and the custom map implementation
     * is enabled.
     *
     * @param duration time since last access after which entries may be evicted
     * @param unit the unit of {@code duration}
     * @return this builder, for chaining
     */
    @Deprecated
    @GwtIncompatible("To be supported")
    @Override
    MapMaker expireAfterAccess(long duration, TimeUnit unit) {
        checkExpiration(duration, unit);
        this.expireAfterAccessNanos = unit.toNanos(duration);
        if (duration == 0 && this.nullRemovalCause == null) {
            // SIZE trumps EXPIRED
            this.nullRemovalCause = RemovalCause.EXPIRED; // depends on control dependency: [if], data = [none]
        }
        useCustomMap = true;
        return this;
    } }
|
public class class_name {
    /**
     * Counts how many sub-items of {@code header} are currently selected,
     * consulting the adapter's SelectExtension. Returns 0 when the adapter has
     * no such extension attached.
     *
     * @param adapter the adapter whose selection state is queried
     * @param header  the expandable header item whose sub-items are counted
     * @return the number of selected sub-items, or 0 without a SelectExtension
     */
    public static <T extends IItem & IExpandable> int countSelectedSubItems(final FastAdapter adapter, T header) {
        final SelectExtension extension = (SelectExtension) adapter.getExtension(SelectExtension.class);
        if (extension == null) {
            return 0;
        }
        return countSelectedSubItems(extension.getSelectedItems(), header);
    } }
|
public class class_name {
    /**
     * Counts the selected sub-items of {@code header} using the adapter's
     * SelectExtension; returns 0 when no such extension is attached.
     *
     * @param adapter the adapter whose selection state is queried
     * @param header  the expandable header item whose sub-items are counted
     * @return the number of selected sub-items, or 0 without a SelectExtension
     */
    public static <T extends IItem & IExpandable> int countSelectedSubItems(final FastAdapter adapter, T header) {
        SelectExtension extension = (SelectExtension) adapter.getExtension(SelectExtension.class);
        if (extension != null) {
            Set<IItem> selections = extension.getSelectedItems();
            return countSelectedSubItems(selections, header); // depends on control dependency: [if], data = [none]
        }
        return 0;
    } }
|
public class class_name {
    /**
     * Adds the dependency fields of a resource to the index document: the
     * dependency type, the main document (when both main document and type are
     * present), and one field per variant and per attachment.
     *
     * @param cms     the CMS context used to serialize each dependency
     * @param resDeps the dependency information to index; ignored when null
     */
    public void addDocumentDependency(CmsObject cms, CmsDocumentDependency resDeps) {
        if (resDeps == null) {
            return; // nothing to add
        }
        m_doc.addField(CmsSearchField.FIELD_DEPENDENCY_TYPE, resDeps.getType());
        if ((resDeps.getMainDocument() != null) && (resDeps.getType() != null)) {
            m_doc.addField(
                CmsSearchField.FIELD_PREFIX_DEPENDENCY + resDeps.getType().toString(),
                resDeps.getMainDocument().toDependencyString(cms));
        }
        for (CmsDocumentDependency variant : resDeps.getVariants()) {
            m_doc.addField(
                CmsSearchField.FIELD_PREFIX_DEPENDENCY + variant.getType().toString(),
                variant.toDependencyString(cms));
        }
        for (CmsDocumentDependency attachment : resDeps.getAttachments()) {
            m_doc.addField(
                CmsSearchField.FIELD_PREFIX_DEPENDENCY + attachment.getType().toString(),
                attachment.toDependencyString(cms));
        }
    } }
|
public class class_name {
    /**
     * Adds the dependency fields of a resource to the index document: the
     * dependency type, the main document (when both main document and type are
     * present), and one field per variant and per attachment.
     *
     * @param cms     the CMS context used to serialize each dependency
     * @param resDeps the dependency information to index; ignored when null
     */
    public void addDocumentDependency(CmsObject cms, CmsDocumentDependency resDeps) {
        if (resDeps != null) {
            m_doc.addField(CmsSearchField.FIELD_DEPENDENCY_TYPE, resDeps.getType()); // depends on control dependency: [if], data = [none]
            if ((resDeps.getMainDocument() != null) && (resDeps.getType() != null)) {
                m_doc.addField(
                    CmsSearchField.FIELD_PREFIX_DEPENDENCY + resDeps.getType().toString(),
                    resDeps.getMainDocument().toDependencyString(cms)); // depends on control dependency: [if], data = [none]
            }
            for (CmsDocumentDependency dep : resDeps.getVariants()) {
                m_doc.addField(
                    CmsSearchField.FIELD_PREFIX_DEPENDENCY + dep.getType().toString(),
                    dep.toDependencyString(cms)); // depends on control dependency: [for], data = [none]
            }
            for (CmsDocumentDependency dep : resDeps.getAttachments()) {
                m_doc.addField(
                    CmsSearchField.FIELD_PREFIX_DEPENDENCY + dep.getType().toString(),
                    dep.toDependencyString(cms)); // depends on control dependency: [for], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Builds the list of row base times covering [start_row_time, end_row_time],
     * with each base aligned down to a Const.MAX_TIMESPAN boundary. The end
     * boundary is inclusive; the initial capacity is only a sizing hint.
     *
     * @return one base timestamp per MAX_TIMESPAN bucket in the range
     */
    @VisibleForTesting
    List<Long> prepareRowBaseTimes() {
        final int expected_buckets =
            (int) ((end_row_time - start_row_time) / Const.MAX_TIMESPAN);
        final ArrayList<Long> base_times = new ArrayList<Long>(expected_buckets);
        // Align the first base time to the start of its bucket, then step
        // bucket by bucket up to and including end_row_time.
        for (long base = start_row_time - (start_row_time % Const.MAX_TIMESPAN);
             base <= end_row_time;
             base += Const.MAX_TIMESPAN) {
            base_times.add(base);
        }
        return base_times;
    } }
|
public class class_name {
    /**
     * Builds the list of row base times covering [start_row_time, end_row_time],
     * aligned down to Const.MAX_TIMESPAN boundaries; the end boundary is
     * inclusive. The initial ArrayList capacity is only a sizing hint.
     *
     * @return one base timestamp per MAX_TIMESPAN bucket in the range
     */
    @VisibleForTesting
    List<Long> prepareRowBaseTimes() {
        final ArrayList<Long> row_base_time_list = new ArrayList<Long>(
            (int) ((end_row_time - start_row_time) / Const.MAX_TIMESPAN));
        // NOTE: inclusive end here
        long ts = (start_row_time - (start_row_time % Const.MAX_TIMESPAN));
        while (ts <= end_row_time) {
            row_base_time_list.add(ts); // depends on control dependency: [while], data = [(ts]
            ts += Const.MAX_TIMESPAN; // depends on control dependency: [while], data = [none]
        }
        return row_base_time_list;
    } }
|
public class class_name {
    /**
     * Returns the descendant node reached by consuming every element of
     * {@code path}, or null when the tree does not contain the full path.
     * The input list is copied, so the caller's list is never modified.
     *
     * @param path the path elements to follow from this node
     * @return the matching descendant, or null when the full path is absent
     */
    public CmsPathTree<P, V> findNode(List<P> path) {
        final List<P> remaining = Lists.newLinkedList(path);
        final CmsPathTree<P, V> match = findNodeInternal(remaining);
        // findNodeInternal consumes path elements while descending; anything
        // left over means the tree ran out before the path did.
        return remaining.isEmpty() ? match : null;
    } }
|
public class class_name {
    /**
     * Returns the descendant node reached by consuming all of {@code path}, or
     * null when the tree does not contain the full path. The input list is
     * copied, so the caller's list is never modified.
     *
     * @param path the path elements to follow from this node
     * @return the matching descendant, or null when the full path is absent
     */
    public CmsPathTree<P, V> findNode(List<P> path) {
        List<P> pathToConsume = Lists.newLinkedList(path);
        CmsPathTree<P, V> descendant = findNodeInternal(pathToConsume);
        // findNodeInternal consumes path elements while descending; leftovers
        // mean the path was longer than the matching branch.
        if (!pathToConsume.isEmpty()) {
            return null; // depends on control dependency: [if], data = [none]
        } else {
            return descendant; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Returns the value mapped to {@code key}; when no mapping exists, creates
     * one via {@code factory}, inserts it, and returns it. The factory is only
     * invoked on a miss.
     *
     * @param key     lookup key
     * @param factory lazily produces the value on a miss
     * @return the existing or newly created value
     */
    public final V putIfAbsent(K key, LazyFactory<V> factory) {
        final int hash = hash(key);
        final int slot = indexOf(hash);
        // Walk the collision chain rooted at the slot looking for the key.
        Entry<K, V> cursor = table[slot];
        while (cursor != null) {
            if (cursor.hashCode == hash && cursor.key.equals(key)) {
                return cursor.value; // existing mapping wins; factory not invoked
            }
            cursor = cursor.next;
        }
        // Miss: lazily create the value and link a fresh entry into the chain.
        final V created = factory.create();
        insertNewEntry(hash, key, created, slot);
        return created;
    } }
|
public class class_name {
    /**
     * Returns the value mapped to {@code key}; when absent, creates one via
     * {@code factory}, inserts it, and returns it. The factory is only invoked
     * on a miss.
     *
     * @param key     lookup key
     * @param factory lazily produces the value on a miss
     * @return the existing or newly created value
     */
    public final V putIfAbsent(K key, LazyFactory<V> factory) {
        final int hash = hash(key);
        final int slot = indexOf(hash);
        // search the chain from the slot
        for (Entry<K, V> entry = table[slot]; entry != null; entry = entry.next) {
            if (entry.hashCode == hash && entry.key.equals(key)) {
                // found match
                return entry.value; // depends on control dependency: [if], data = [none]
            }
        }
        // no match, insert a new value
        V value = factory.create();
        insertNewEntry(hash, key, value, slot);
        // return the created value
        return value;
    } }
|
public class class_name {
    /**
     * Reports whether the gzipped body has the size of an empty gzip stream
     * (EMPTY_GZIPPED_CONTENT_SIZE), i.e. the response carried no content.
     * Traces the request URL when the body is empty.
     *
     * @param compressedBytes the gzipped response body
     * @param request         the request, used only for trace logging
     * @return true when the compressed body is the empty-content size
     */
    public static boolean shouldGzippedBodyBeZero(byte[] compressedBytes, HttpServletRequest request) {
        if (compressedBytes.length != EMPTY_GZIPPED_CONTENT_SIZE) {
            return false;
        }
        if (log.isTraceEnabled()) {
            log.trace("{} resulted in an empty response.", request.getRequestURL());
        }
        return true;
    } }
|
public class class_name {
    /**
     * Reports whether the gzipped body has the size of an empty gzip stream
     * (EMPTY_GZIPPED_CONTENT_SIZE), i.e. the response carried no content;
     * traces the request URL when that is the case.
     *
     * @param compressedBytes the gzipped response body
     * @param request         the request, used only for trace logging
     * @return true when the compressed body is the empty-content size
     */
    public static boolean shouldGzippedBodyBeZero(byte[] compressedBytes, HttpServletRequest request) {
        //Check for 0 length body
        if (compressedBytes.length == EMPTY_GZIPPED_CONTENT_SIZE) {
            if (log.isTraceEnabled()) {
                log.trace("{} resulted in an empty response.", request.getRequestURL()); // depends on control dependency: [if], data = [none]
            }
            return true; // depends on control dependency: [if], data = [none]
        } else {
            return false; // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Decides whether candidate {@code sym} is a better match than
     * {@code bestSoFar} for an invocation with the given argument and
     * type-argument lists at {@code site}, returning the new "best so far"
     * symbol — possibly a resolution-error symbol describing why candidates
     * are inapplicable or inaccessible.
     *
     * @param sym       the candidate symbol being considered
     * @param bestSoFar the best symbol found so far (may be an error symbol)
     * @return the updated best symbol
     */
    @SuppressWarnings("fallthrough")
    Symbol selectBest(Env<AttrContext> env,
                      Type site,
                      List<Type> argtypes,
                      List<Type> typeargtypes,
                      Symbol sym,
                      Symbol bestSoFar,
                      boolean allowBoxing,
                      boolean useVarargs,
                      boolean operator) {
        // Erroneous or non-inherited candidates cannot improve the result.
        if (sym.kind == ERR ||
            !sym.isInheritedIn(site.tsym, types)) {
            return bestSoFar;
        } else if (useVarargs && (sym.flags() & VARARGS) == 0) {
            return bestSoFar.kind >= ERRONEOUS ?
                new BadVarargsMethod((ResolveError)bestSoFar.baseSymbol()) :
                bestSoFar;
        }
        Assert.check(sym.kind < AMBIGUOUS);
        try {
            // Throws InapplicableMethodException when sym cannot be applied.
            Type mt = rawInstantiate(env, site, sym, null, argtypes, typeargtypes,
                                     allowBoxing, useVarargs, types.noWarnings);
            if (!operator || verboseResolutionMode.contains(VerboseResolutionMode.PREDEF))
                currentResolutionContext.addApplicableCandidate(sym, mt);
        } catch (InapplicableMethodException ex) {
            if (!operator)
                currentResolutionContext.addInapplicableCandidate(sym, ex.getDiagnostic());
            switch (bestSoFar.kind) {
                case ABSENT_MTH:
                    return new InapplicableSymbolError(currentResolutionContext);
                case WRONG_MTH:
                    if (operator) return bestSoFar;
                    bestSoFar = new InapplicableSymbolsError(currentResolutionContext);
                // fallthrough into default is intentional (see @SuppressWarnings)
                default:
                    return bestSoFar;
            }
        }
        if (!isAccessible(env, site, sym)) {
            return (bestSoFar.kind == ABSENT_MTH)
                ? new AccessError(env, site, sym)
                : bestSoFar;
        }
        return (bestSoFar.kind > AMBIGUOUS)
            ? sym
            : mostSpecific(argtypes, sym, bestSoFar, env, site,
                           allowBoxing && operator, useVarargs);
    } }
|
public class class_name {
    /**
     * Decides whether candidate {@code sym} is a better match than
     * {@code bestSoFar} for an invocation with the given argument and
     * type-argument lists at {@code site}, returning the new "best so far"
     * symbol — possibly a resolution-error symbol describing why candidates
     * are inapplicable or inaccessible.
     *
     * @param sym       the candidate symbol being considered
     * @param bestSoFar the best symbol found so far (may be an error symbol)
     * @return the updated best symbol
     */
    @SuppressWarnings("fallthrough")
    Symbol selectBest(Env<AttrContext> env,
                      Type site,
                      List<Type> argtypes,
                      List<Type> typeargtypes,
                      Symbol sym,
                      Symbol bestSoFar,
                      boolean allowBoxing,
                      boolean useVarargs,
                      boolean operator) {
        if (sym.kind == ERR ||
            !sym.isInheritedIn(site.tsym, types)) {
            return bestSoFar; // depends on control dependency: [if], data = [none]
        } else if (useVarargs && (sym.flags() & VARARGS) == 0) {
            return bestSoFar.kind >= ERRONEOUS ?
                new BadVarargsMethod((ResolveError)bestSoFar.baseSymbol()) :
                bestSoFar; // depends on control dependency: [if], data = [none]
        }
        Assert.check(sym.kind < AMBIGUOUS);
        try {
            // Throws InapplicableMethodException when sym cannot be applied.
            Type mt = rawInstantiate(env, site, sym, null, argtypes, typeargtypes,
                                     allowBoxing, useVarargs, types.noWarnings);
            if (!operator || verboseResolutionMode.contains(VerboseResolutionMode.PREDEF))
                currentResolutionContext.addApplicableCandidate(sym, mt);
        } catch (InapplicableMethodException ex) {
            if (!operator)
                currentResolutionContext.addInapplicableCandidate(sym, ex.getDiagnostic());
            switch (bestSoFar.kind) {
                case ABSENT_MTH:
                    return new InapplicableSymbolError(currentResolutionContext);
                case WRONG_MTH:
                    if (operator) return bestSoFar;
                    bestSoFar = new InapplicableSymbolsError(currentResolutionContext);
                // fallthrough into default is intentional (see @SuppressWarnings)
                default:
                    return bestSoFar;
            }
        } // depends on control dependency: [catch], data = [none]
        if (!isAccessible(env, site, sym)) {
            return (bestSoFar.kind == ABSENT_MTH)
                ? new AccessError(env, site, sym)
                : bestSoFar; // depends on control dependency: [if], data = [none]
        }
        return (bestSoFar.kind > AMBIGUOUS)
            ? sym
            : mostSpecific(argtypes, sym, bestSoFar, env, site,
                           allowBoxing && operator, useVarargs);
    } }
|
public class class_name {
    /**
     * Marshals the AudioOnlyHlsSettings fields (audio group id, audio-only
     * image, audio track type) to the protocol marshaller.
     *
     * @param audioOnlyHlsSettings the settings to marshall; must not be null
     * @param protocolMarshaller   the target marshaller
     * @throws SdkClientException when the settings are null or any field fails
     *         to marshall (the original exception is chained as the cause)
     */
    public void marshall(AudioOnlyHlsSettings audioOnlyHlsSettings, ProtocolMarshaller protocolMarshaller) {
        if (audioOnlyHlsSettings == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(audioOnlyHlsSettings.getAudioGroupId(), AUDIOGROUPID_BINDING);
            protocolMarshaller.marshall(audioOnlyHlsSettings.getAudioOnlyImage(), AUDIOONLYIMAGE_BINDING);
            protocolMarshaller.marshall(audioOnlyHlsSettings.getAudioTrackType(), AUDIOTRACKTYPE_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } }
|
public class class_name {
    /**
     * Marshals the AudioOnlyHlsSettings fields (audio group id, audio-only
     * image, audio track type) to the protocol marshaller.
     *
     * @param audioOnlyHlsSettings the settings to marshall; must not be null
     * @param protocolMarshaller   the target marshaller
     * @throws SdkClientException when the settings are null or any field fails
     *         to marshall (the original exception is chained as the cause)
     */
    public void marshall(AudioOnlyHlsSettings audioOnlyHlsSettings, ProtocolMarshaller protocolMarshaller) {
        if (audioOnlyHlsSettings == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(audioOnlyHlsSettings.getAudioGroupId(), AUDIOGROUPID_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(audioOnlyHlsSettings.getAudioOnlyImage(), AUDIOONLYIMAGE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(audioOnlyHlsSettings.getAudioTrackType(), AUDIOTRACKTYPE_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Clicks every option of the underlying multi-select that is NOT currently
     * selected (i.e. as written, it selects all remaining options). Throws for
     * single-selects.
     *
     * <p>NOTE(review): the guard's error message says "deselect all", yet the
     * loop clicks UNselected options, which selects them. Either the message
     * or the {@code !option.isSelected()} condition looks inverted — confirm
     * the intended behavior against this action's name/callers before changing.
     *
     * @throws UnsupportedOperationException when the select is not a multi-select
     */
    @Override
    protected void doPerform() {
        Select select = getSelectElement();
        if (!select.isMultiple()) {
            throw new UnsupportedOperationException("You may only deselect all options of a multi-select");
        }
        for (WebElement option : select.getOptions()) {
            if (!option.isSelected()) {
                option.click();
            }
        }
    } }
|
public class class_name {
    /**
     * Clicks every option of the underlying multi-select that is NOT currently
     * selected (i.e. as written, it selects all remaining options). Throws for
     * single-selects.
     *
     * <p>NOTE(review): the guard's error message says "deselect all", yet the
     * loop clicks UNselected options, which selects them. Either the message
     * or the {@code !option.isSelected()} condition looks inverted — confirm
     * the intended behavior against this action's name/callers before changing.
     *
     * @throws UnsupportedOperationException when the select is not a multi-select
     */
    @Override
    protected void doPerform() {
        Select select = getSelectElement();
        if (!select.isMultiple()) {
            throw new UnsupportedOperationException("You may only deselect all options of a multi-select");
        }
        for (WebElement option : select.getOptions()) {
            if (!option.isSelected()) {
                option.click(); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
    /**
     * Derives a new MetricName by appending {@code p} to this name's key with
     * the separator. A null/empty suffix keeps the current key; a null/empty
     * current key uses the suffix alone. Tags are carried over unchanged.
     *
     * @param p the suffix to append; may be null or empty
     * @return a new MetricName with the combined key and the same tags
     */
    public MetricName resolve(String p) {
        final boolean hasSuffix = p != null && !p.isEmpty();
        final boolean hasKey = key != null && !key.isEmpty();
        final String combined;
        if (!hasSuffix) {
            combined = this.key;
        } else if (hasKey) {
            combined = key + SEPARATOR + p;
        } else {
            combined = p;
        }
        return new MetricName(combined, tags);
    } }
|
public class class_name {
public MetricName resolve(String p) {
final String next;
if (p != null && !p.isEmpty()) {
if (key != null && !key.isEmpty()) {
next = key + SEPARATOR + p; // depends on control dependency: [if], data = [none]
} else {
next = p; // depends on control dependency: [if], data = [none]
}
} else {
next = this.key; // depends on control dependency: [if], data = [none]
}
return new MetricName(next, tags);
} }
|
public class class_name {
protected void updateMSIdinCallDetailRecord(final String msId, final Sid callSid){
if(callSid == null){
if(logger.isDebugEnabled())
logger.debug("Call Id is not specisfied, it can be an outbound call.");
}else{
CallDetailRecordsDao dao = storage.getCallDetailRecordsDao();
CallDetailRecord cdr = dao.getCallDetailRecord(callSid);
if(cdr != null){
cdr = cdr.setMsId(msId);
dao.updateCallDetailRecord(cdr);
}else{
logger.error("provided call id did not found");
}
}
} }
|
public class class_name {
protected void updateMSIdinCallDetailRecord(final String msId, final Sid callSid){
if(callSid == null){
if(logger.isDebugEnabled())
logger.debug("Call Id is not specisfied, it can be an outbound call.");
}else{
CallDetailRecordsDao dao = storage.getCallDetailRecordsDao();
CallDetailRecord cdr = dao.getCallDetailRecord(callSid);
if(cdr != null){
cdr = cdr.setMsId(msId); // depends on control dependency: [if], data = [none]
dao.updateCallDetailRecord(cdr); // depends on control dependency: [if], data = [(cdr]
}else{
logger.error("provided call id did not found"); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
protected void onSerializableProperty(final String propertyName, final Class propertyType, final Object value) {
jsonContext.pushName(propertyName, count > 0);
jsonContext.serialize(value);
if (jsonContext.isNamePopped()) {
count++;
}
} }
|
public class class_name {
protected void onSerializableProperty(final String propertyName, final Class propertyType, final Object value) {
jsonContext.pushName(propertyName, count > 0);
jsonContext.serialize(value);
if (jsonContext.isNamePopped()) {
count++; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
private static PCM clear_ligne(PCM pcm, PCM pcm_return){
List<Product> pdts = pcm.getProducts();
List<Cell> cells = new ArrayList<Cell>() ;
for (Product pr : pdts) {
float nbCellsEmpty = 0 ;
// On ajoute les cellules du product dans une liste
cells = pr.getCells();
// On traite les infos des cellules
for(Cell c : cells){
if(c.getContent().isEmpty()){
nbCellsEmpty ++ ;
}
}
if(cells.size() != 0){
System.out.println("Dans les lignes -- > \n Nombre de cellule vide :" + nbCellsEmpty + "\n Nombre de cellule : " + cells.size());
System.out.println("Valeur du if : " + nbCellsEmpty/cells.size());
if(!((nbCellsEmpty/cells.size()) > RATIO_EMPTY_CELL)){
System.out.println("on ajoute la ligne !");
pcm_return.addProduct(pr);
}
}
}
return pcm_return;
} }
|
public class class_name {
private static PCM clear_ligne(PCM pcm, PCM pcm_return){
List<Product> pdts = pcm.getProducts();
List<Cell> cells = new ArrayList<Cell>() ;
for (Product pr : pdts) {
float nbCellsEmpty = 0 ;
// On ajoute les cellules du product dans une liste
cells = pr.getCells(); // depends on control dependency: [for], data = [pr]
// On traite les infos des cellules
for(Cell c : cells){
if(c.getContent().isEmpty()){
nbCellsEmpty ++ ; // depends on control dependency: [if], data = [none]
}
}
if(cells.size() != 0){
System.out.println("Dans les lignes -- > \n Nombre de cellule vide :" + nbCellsEmpty + "\n Nombre de cellule : " + cells.size()); // depends on control dependency: [if], data = [none]
System.out.println("Valeur du if : " + nbCellsEmpty/cells.size()); // depends on control dependency: [if], data = [none]
if(!((nbCellsEmpty/cells.size()) > RATIO_EMPTY_CELL)){
System.out.println("on ajoute la ligne !"); // depends on control dependency: [if], data = [none]
pcm_return.addProduct(pr); // depends on control dependency: [if], data = [none]
}
}
}
return pcm_return;
} }
|
public class class_name {
protected void setLoggedInUser(final HttpServletRequest _request,
final String _userName)
{
if (_userName == null) {
_request.getSession(true).removeAttribute(this.sessionParameterLoginName);
} else {
_request.getSession(true).setAttribute(this.sessionParameterLoginName,
_userName);
}
} }
|
public class class_name {
protected void setLoggedInUser(final HttpServletRequest _request,
final String _userName)
{
if (_userName == null) {
_request.getSession(true).removeAttribute(this.sessionParameterLoginName); // depends on control dependency: [if], data = [none]
} else {
_request.getSession(true).setAttribute(this.sessionParameterLoginName,
_userName); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
public static HazardCurve createHazardCurveFromSurvivalProbabilities(String name, double[] times, double[] givenSurvivalProbabilities){
HazardCurve survivalProbabilities = new HazardCurve(name);
for(int timeIndex=0; timeIndex<times.length;timeIndex++) {
survivalProbabilities.addSurvivalProbability(times[timeIndex], givenSurvivalProbabilities[timeIndex], times[timeIndex] > 0);
}
return survivalProbabilities;
} }
|
public class class_name {
public static HazardCurve createHazardCurveFromSurvivalProbabilities(String name, double[] times, double[] givenSurvivalProbabilities){
HazardCurve survivalProbabilities = new HazardCurve(name);
for(int timeIndex=0; timeIndex<times.length;timeIndex++) {
survivalProbabilities.addSurvivalProbability(times[timeIndex], givenSurvivalProbabilities[timeIndex], times[timeIndex] > 0); // depends on control dependency: [for], data = [timeIndex]
}
return survivalProbabilities;
} }
|
public class class_name {
void expandCurrentFolder() {
if (m_currentFolder != null) {
m_treeContainer.setChildrenAllowed(m_currentFolder, true);
m_fileTree.expandItem(m_currentFolder);
}
} }
|
public class class_name {
void expandCurrentFolder() {
if (m_currentFolder != null) {
m_treeContainer.setChildrenAllowed(m_currentFolder, true); // depends on control dependency: [if], data = [(m_currentFolder]
m_fileTree.expandItem(m_currentFolder); // depends on control dependency: [if], data = [(m_currentFolder]
}
} }
|
public class class_name {
private Node<K, V> cutOldestChild(Node<K, V> n) {
Node<K, V> oldestChild = n.o_c;
if (oldestChild != null) {
if (oldestChild.y_s != null) {
oldestChild.y_s.o_s = n;
}
n.o_c = oldestChild.y_s;
oldestChild.y_s = null;
oldestChild.o_s = null;
}
return oldestChild;
} }
|
public class class_name {
private Node<K, V> cutOldestChild(Node<K, V> n) {
Node<K, V> oldestChild = n.o_c;
if (oldestChild != null) {
if (oldestChild.y_s != null) {
oldestChild.y_s.o_s = n; // depends on control dependency: [if], data = [none]
}
n.o_c = oldestChild.y_s; // depends on control dependency: [if], data = [none]
oldestChild.y_s = null; // depends on control dependency: [if], data = [none]
oldestChild.o_s = null; // depends on control dependency: [if], data = [none]
}
return oldestChild;
} }
|
public class class_name {
protected String findPackageSuffix(String uri) {
String temp = uri.startsWith("/") ? uri.substring(1) : uri;
temp = temp.replace(".", "_");
temp = temp.replace("/", ".");
//find all matches
List<String> candidates = new ArrayList<>();
for (String pack : Configuration.getControllerPackages()) {
if (temp.startsWith(pack) && (temp.length() == pack.length() || temp.length() > pack.length() && temp.charAt(pack.length()) == '.')) {
candidates.add(pack);
}
}
int resultIndex = 0;
int size = 0;
//find the longest package
for (int i = 0; i < candidates.size(); i++) {
String candidate = candidates.get(i);
if (candidate.length() > size) {
size = candidate.length();
resultIndex = i;
}
}
return !candidates.isEmpty() ? candidates.get(resultIndex) : null;
} }
|
public class class_name {
protected String findPackageSuffix(String uri) {
String temp = uri.startsWith("/") ? uri.substring(1) : uri;
temp = temp.replace(".", "_");
temp = temp.replace("/", ".");
//find all matches
List<String> candidates = new ArrayList<>();
for (String pack : Configuration.getControllerPackages()) {
if (temp.startsWith(pack) && (temp.length() == pack.length() || temp.length() > pack.length() && temp.charAt(pack.length()) == '.')) {
candidates.add(pack); // depends on control dependency: [if], data = [none]
}
}
int resultIndex = 0;
int size = 0;
//find the longest package
for (int i = 0; i < candidates.size(); i++) {
String candidate = candidates.get(i);
if (candidate.length() > size) {
size = candidate.length(); // depends on control dependency: [if], data = [none]
resultIndex = i; // depends on control dependency: [if], data = [none]
}
}
return !candidates.isEmpty() ? candidates.get(resultIndex) : null;
} }
|
public class class_name {
@GuardedBy("lock")
private void scheduleBackoff(final Status status) {
class EndOfCurrentBackoff implements Runnable {
@Override
public void run() {
try {
synchronized (lock) {
reconnectTask = null;
if (reconnectCanceled) {
// Even though cancelReconnectTask() will cancel this task, the task may have already
// started when it's being canceled.
return;
}
channelLogger.log(ChannelLogLevel.INFO, "CONNECTING after backoff");
gotoNonErrorState(CONNECTING);
startNewTransport();
}
} catch (Throwable t) {
// TODO(zhangkun): we may consider using SynchronizationContext to schedule the reconnect
// timer, so that we don't need this catch, since SynchronizationContext would catch it.
log.log(Level.WARNING, "Exception handling end of backoff", t);
} finally {
syncContext.drain();
}
}
}
gotoState(ConnectivityStateInfo.forTransientFailure(status));
if (reconnectPolicy == null) {
reconnectPolicy = backoffPolicyProvider.get();
}
long delayNanos =
reconnectPolicy.nextBackoffNanos() - connectingTimer.elapsed(TimeUnit.NANOSECONDS);
channelLogger.log(
ChannelLogLevel.INFO,
"TRANSIENT_FAILURE ({0}). Will reconnect after {1} ns",
printShortStatus(status), delayNanos);
Preconditions.checkState(reconnectTask == null, "previous reconnectTask is not done");
reconnectCanceled = false;
reconnectTask = scheduledExecutor.schedule(
new LogExceptionRunnable(new EndOfCurrentBackoff()),
delayNanos,
TimeUnit.NANOSECONDS);
} }
|
public class class_name {
@GuardedBy("lock")
private void scheduleBackoff(final Status status) {
class EndOfCurrentBackoff implements Runnable {
@Override
public void run() {
try {
synchronized (lock) { // depends on control dependency: [try], data = [none]
reconnectTask = null;
if (reconnectCanceled) {
// Even though cancelReconnectTask() will cancel this task, the task may have already
// started when it's being canceled.
return; // depends on control dependency: [if], data = [none]
}
channelLogger.log(ChannelLogLevel.INFO, "CONNECTING after backoff");
gotoNonErrorState(CONNECTING);
startNewTransport();
}
} catch (Throwable t) {
// TODO(zhangkun): we may consider using SynchronizationContext to schedule the reconnect
// timer, so that we don't need this catch, since SynchronizationContext would catch it.
log.log(Level.WARNING, "Exception handling end of backoff", t);
} finally { // depends on control dependency: [catch], data = [none]
syncContext.drain();
}
}
}
gotoState(ConnectivityStateInfo.forTransientFailure(status));
if (reconnectPolicy == null) {
reconnectPolicy = backoffPolicyProvider.get(); // depends on control dependency: [if], data = [none]
}
long delayNanos =
reconnectPolicy.nextBackoffNanos() - connectingTimer.elapsed(TimeUnit.NANOSECONDS);
channelLogger.log(
ChannelLogLevel.INFO,
"TRANSIENT_FAILURE ({0}). Will reconnect after {1} ns",
printShortStatus(status), delayNanos);
Preconditions.checkState(reconnectTask == null, "previous reconnectTask is not done");
reconnectCanceled = false;
reconnectTask = scheduledExecutor.schedule(
new LogExceptionRunnable(new EndOfCurrentBackoff()),
delayNanos,
TimeUnit.NANOSECONDS);
} }
|
public class class_name {
public synchronized void destroy()
{
if (state == DESTROYED)
return;
if (state != DISCONNECTED)
try {
tl.disconnect(this);
}
catch (final KNXLinkClosedException e) {
// we already should've been destroyed on catching this exception
}
setState(DESTROYED);
tl.destroyDestination(this);
} }
|
public class class_name {
public synchronized void destroy()
{
if (state == DESTROYED)
return;
if (state != DISCONNECTED)
try {
tl.disconnect(this);
// depends on control dependency: [try], data = [none]
}
catch (final KNXLinkClosedException e) {
// we already should've been destroyed on catching this exception
}
// depends on control dependency: [catch], data = [none]
setState(DESTROYED);
tl.destroyDestination(this);
} }
|
public class class_name {
public static List<String> split(String pArg, String pEscape, String pDelimiter) {
if (pArg != null) {
ArrayList<String> ret = new ArrayList<String>();
Pattern[] pattern = SPLIT_PATTERNS.get(pEscape + pDelimiter);
if (pattern == null) {
pattern = createSplitPatterns(pEscape, pDelimiter);
SPLIT_PATTERNS.put(pEscape + pDelimiter,pattern);
}
final Matcher m = pattern[0].matcher(pArg);
while (m.find() && m.start(1) != pArg.length()) {
// Finally unescape all escaped parts. Trailing escapes are captured before the delimiter applies
String trailingEscapes = m.group(2);
ret.add(pattern[1].matcher(m.group(1) + (trailingEscapes != null ? trailingEscapes : "")).replaceAll("$1"));
}
return ret;
} else {
return null;
}
} }
|
public class class_name {
public static List<String> split(String pArg, String pEscape, String pDelimiter) {
if (pArg != null) {
ArrayList<String> ret = new ArrayList<String>();
Pattern[] pattern = SPLIT_PATTERNS.get(pEscape + pDelimiter);
if (pattern == null) {
pattern = createSplitPatterns(pEscape, pDelimiter); // depends on control dependency: [if], data = [none]
SPLIT_PATTERNS.put(pEscape + pDelimiter,pattern); // depends on control dependency: [if], data = [none]
}
final Matcher m = pattern[0].matcher(pArg);
while (m.find() && m.start(1) != pArg.length()) {
// Finally unescape all escaped parts. Trailing escapes are captured before the delimiter applies
String trailingEscapes = m.group(2);
ret.add(pattern[1].matcher(m.group(1) + (trailingEscapes != null ? trailingEscapes : "")).replaceAll("$1"));
}
return ret;
} else {
return null;
}
} }
|
public class class_name {
@SuppressWarnings("unchecked")
private <T> List<T> getListValue(Class<T> type, String propertyName, EDBObject object) {
List<T> temp = new ArrayList<>();
for (int i = 0;; i++) {
String property = getEntryNameForList(propertyName, i);
Object obj = object.getObject(property);
if (obj == null) {
break;
}
if (OpenEngSBModel.class.isAssignableFrom(type)) {
obj = convertEDBObjectToUncheckedModel(type, edbService.getObject(object.getString(property)));
}
temp.add((T) obj);
object.remove(property);
}
return temp;
} }
|
public class class_name {
@SuppressWarnings("unchecked")
private <T> List<T> getListValue(Class<T> type, String propertyName, EDBObject object) {
List<T> temp = new ArrayList<>();
for (int i = 0;; i++) {
String property = getEntryNameForList(propertyName, i);
Object obj = object.getObject(property);
if (obj == null) {
break;
}
if (OpenEngSBModel.class.isAssignableFrom(type)) {
obj = convertEDBObjectToUncheckedModel(type, edbService.getObject(object.getString(property))); // depends on control dependency: [if], data = [none]
}
temp.add((T) obj); // depends on control dependency: [for], data = [none]
object.remove(property); // depends on control dependency: [for], data = [none]
}
return temp;
} }
|
public class class_name {
public static InputStream getResourceAsStream(String resource) {
InputStream in = ClassUtils.getContextClassLoader().getResourceAsStream(resource);
if (in == null) {
in = IOUtils.class.getResourceAsStream(resource);
}
return in;
} }
|
public class class_name {
public static InputStream getResourceAsStream(String resource) {
InputStream in = ClassUtils.getContextClassLoader().getResourceAsStream(resource);
if (in == null) {
in = IOUtils.class.getResourceAsStream(resource); // depends on control dependency: [if], data = [none]
}
return in;
} }
|
public class class_name {
public void execute() throws MojoExecutionException, MojoFailureException
{
if (file == null)
throw new MojoFailureException("File not defined");
if (!file.exists())
throw new MojoFailureException("File doesn't exists: " + file);
FileInputStream fis = null;
try
{
Boolean result = null;
if (isLocal())
{
Object value = executeCommand("local-deploy", new Serializable[] {file.toURI().toURL()});
if (value instanceof Boolean)
{
result = (Boolean)value;
}
else
{
throw (Throwable)value;
}
}
else
{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
fis = new FileInputStream(file);
int i = fis.read();
while (i != -1)
{
baos.write(i);
i = fis.read();
}
byte[] bytes = baos.toByteArray();
Object value = executeCommand("remote-deploy", new Serializable[] {file.getName(), bytes});
if (value instanceof Boolean)
{
result = (Boolean)value;
}
else
{
throw (Throwable)value;
}
}
if (result.booleanValue())
{
getLog().info("Deployed: " + file.getName());
}
else
{
getLog().info(file.getName() + " wasn't deployed");
}
}
catch (Throwable t)
{
throw new MojoFailureException("Unable to deploy to " + getHost() + ":" + getPort() +
" (" + t.getMessage() + ")", t);
}
finally
{
if (fis != null)
{
try
{
fis.close();
}
catch (IOException ioe)
{
// Ignore
}
}
}
} }
|
public class class_name {
public void execute() throws MojoExecutionException, MojoFailureException
{
if (file == null)
throw new MojoFailureException("File not defined");
if (!file.exists())
throw new MojoFailureException("File doesn't exists: " + file);
FileInputStream fis = null;
try
{
Boolean result = null;
if (isLocal())
{
Object value = executeCommand("local-deploy", new Serializable[] {file.toURI().toURL()});
if (value instanceof Boolean)
{
result = (Boolean)value; // depends on control dependency: [if], data = [none]
}
else
{
throw (Throwable)value;
}
}
else
{
ByteArrayOutputStream baos = new ByteArrayOutputStream();
fis = new FileInputStream(file); // depends on control dependency: [if], data = [none]
int i = fis.read();
while (i != -1)
{
baos.write(i); // depends on control dependency: [while], data = [(i]
i = fis.read(); // depends on control dependency: [while], data = [none]
}
byte[] bytes = baos.toByteArray();
Object value = executeCommand("remote-deploy", new Serializable[] {file.getName(), bytes});
if (value instanceof Boolean)
{
result = (Boolean)value; // depends on control dependency: [if], data = [none]
}
else
{
throw (Throwable)value;
}
}
if (result.booleanValue())
{
getLog().info("Deployed: " + file.getName()); // depends on control dependency: [if], data = [none]
}
else
{
getLog().info(file.getName() + " wasn't deployed"); // depends on control dependency: [if], data = [none]
}
}
catch (Throwable t)
{
throw new MojoFailureException("Unable to deploy to " + getHost() + ":" + getPort() +
" (" + t.getMessage() + ")", t);
}
finally
{
if (fis != null)
{
try
{
fis.close(); // depends on control dependency: [try], data = [none]
}
catch (IOException ioe)
{
// Ignore
} // depends on control dependency: [catch], data = [none]
}
}
} }
|
public class class_name {
public FieldMember findInstanceField(String name) {
FieldMember found = getLatestTypeDescriptor().getField(name);
if (found != null) {
return found;
}
// Walk up the supertypes - this is looking for instance fields so no need to search interfaces
String slashedSupername = getTypeDescriptor().getSupertypeName();
ReloadableType rtype = typeRegistry.getReloadableType(slashedSupername);
while (rtype != null) {
found = rtype.getLatestTypeDescriptor().getField(name);
if (found != null) {
break;
}
slashedSupername = rtype.getTypeDescriptor().getSupertypeName();
rtype = typeRegistry.getReloadableType(slashedSupername);
}
return found;
} }
|
public class class_name {
public FieldMember findInstanceField(String name) {
FieldMember found = getLatestTypeDescriptor().getField(name);
if (found != null) {
return found; // depends on control dependency: [if], data = [none]
}
// Walk up the supertypes - this is looking for instance fields so no need to search interfaces
String slashedSupername = getTypeDescriptor().getSupertypeName();
ReloadableType rtype = typeRegistry.getReloadableType(slashedSupername);
while (rtype != null) {
found = rtype.getLatestTypeDescriptor().getField(name); // depends on control dependency: [while], data = [none]
if (found != null) {
break;
}
slashedSupername = rtype.getTypeDescriptor().getSupertypeName(); // depends on control dependency: [while], data = [none]
rtype = typeRegistry.getReloadableType(slashedSupername); // depends on control dependency: [while], data = [none]
}
return found;
} }
|
public class class_name {
static int getPoaThreadPoolMax() {
if (!thread_pool_max_done) {
final String str = System.getProperty("jacorb.poa.thread_pool_max");
int value;
try {
value = Integer.parseInt(str);
if (value > 0) {
thread_pool_max = value;
}
} catch (final NumberFormatException e) {
}
thread_pool_max_done = true;
}
return thread_pool_max;
} }
|
public class class_name {
static int getPoaThreadPoolMax() {
if (!thread_pool_max_done) {
final String str = System.getProperty("jacorb.poa.thread_pool_max");
int value;
try {
value = Integer.parseInt(str); // depends on control dependency: [try], data = [none]
if (value > 0) {
thread_pool_max = value; // depends on control dependency: [if], data = [none]
}
} catch (final NumberFormatException e) {
} // depends on control dependency: [catch], data = [none]
thread_pool_max_done = true; // depends on control dependency: [if], data = [none]
}
return thread_pool_max;
} }
|
public class class_name {
private void parseFieldRemove(String fieldName, UNode addNode) {
Set<String> removeValueSet = new LinkedHashSet<>();
for (UNode valueNode : addNode.getMemberList()) {
Utils.require(valueNode.isValue() && valueNode.getName().equals("value"),
"Value expected for 'remove' element: " + valueNode);
removeValueSet.add(valueNode.getValue());
}
removeFieldValues(fieldName, removeValueSet);
} }
|
public class class_name {
private void parseFieldRemove(String fieldName, UNode addNode) {
Set<String> removeValueSet = new LinkedHashSet<>();
for (UNode valueNode : addNode.getMemberList()) {
Utils.require(valueNode.isValue() && valueNode.getName().equals("value"),
"Value expected for 'remove' element: " + valueNode);
// depends on control dependency: [for], data = [valueNode]
removeValueSet.add(valueNode.getValue());
// depends on control dependency: [for], data = [valueNode]
}
removeFieldValues(fieldName, removeValueSet);
} }
|
public class class_name {
protected void updateCiManagement(CiManagement value, String xmlTag, Counter counter, Element element)
{
boolean shouldExist = value != null;
Element root = updateElement(counter, element, xmlTag, shouldExist);
if (shouldExist)
{
Counter innerCount = new Counter(counter.getDepth() + 1);
findAndReplaceSimpleElement(innerCount, root, "system", value.getSystem(), null);
findAndReplaceSimpleElement(innerCount, root, "url", value.getUrl(), null);
iterateNotifier(innerCount, root, value.getNotifiers(), "notifiers", "notifier");
}
} }
|
public class class_name {
protected void updateCiManagement(CiManagement value, String xmlTag, Counter counter, Element element)
{
boolean shouldExist = value != null;
Element root = updateElement(counter, element, xmlTag, shouldExist);
if (shouldExist)
{
Counter innerCount = new Counter(counter.getDepth() + 1);
findAndReplaceSimpleElement(innerCount, root, "system", value.getSystem(), null); // depends on control dependency: [if], data = [none]
findAndReplaceSimpleElement(innerCount, root, "url", value.getUrl(), null); // depends on control dependency: [if], data = [none]
iterateNotifier(innerCount, root, value.getNotifiers(), "notifiers", "notifier"); // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
private boolean isUnit(final int lit, final MSClause clause) {
for (int i = 0; i < clause.size(); ++i) {
final int clauseLit = clause.get(i);
if (lit != clauseLit && this.model.get(var(clauseLit)) != sign(clauseLit)) {
return false;
}
}
return true;
} }
|
public class class_name {
private boolean isUnit(final int lit, final MSClause clause) {
for (int i = 0; i < clause.size(); ++i) {
final int clauseLit = clause.get(i);
if (lit != clauseLit && this.model.get(var(clauseLit)) != sign(clauseLit)) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} }
|
public class class_name {
@Deprecated
public String getAccessToken() {
Session s = getSession();
if (s != null) {
return s.getAccessToken();
} else {
return null;
}
} }
|
public class class_name {
@Deprecated
public String getAccessToken() {
Session s = getSession();
if (s != null) {
return s.getAccessToken(); // depends on control dependency: [if], data = [none]
} else {
return null; // depends on control dependency: [if], data = [none]
}
} }
|
public class class_name {
private Token indent() {
Matcher matcher;
String re;
if (indentRe != null) {
matcher = scanner.getMatcherForPattern(indentRe);
} else {
// tabs
re = "^\\n(\\t*) *";
String indentType = "tabs";
matcher = scanner.getMatcherForPattern(re);
// spaces
if (matcher.find(0) && matcher.group(1).length() == 0) {
re = "^\\n( *)";
indentType = "spaces";
matcher = scanner.getMatcherForPattern(re);
}
// established
if (matcher.find(0) && matcher.group(1).length() > 0)
this.indentRe = re;
this.indentType = indentType;
}
if (matcher.find(0) && matcher.groupCount() > 0) {
Token tok;
int indents = matcher.group(1).length();
lineno++;
consume(indents + 1);
if(scanner.getInput().length() > 0 && (scanner.getInput().charAt(0) == ' ' || scanner.getInput().charAt(0) == '\t')){
throw new JadeLexerException("Invalid indentation, you can use tabs or spaces but not both", filename, getLineno(), templateLoader);
}
// if (lastIndents <= 0 && indents > 0)
// lastIndents = indents;
// if ((indents > 0 && lastIndents > 0 && indents % lastIndents != 0) || scanner.isIntendantionViolated()) {
// throw new JadeLexerException("invalid indentation; expecting " + indents + " " + indentType, filename, getLineno(), templateLoader);
// }
// blank line
if (scanner.isBlankLine()) {
this.pipeless = false;
return new Newline(lineno);
}
// outdent
if (indentStack.size() > 0 && indents < indentStack.get(0)) {
while (indentStack.size() > 0 && indentStack.get(0) > indents) {
stash.add(new Outdent(lineno));
indentStack.poll();
}
tok = this.stash.pollLast();
// indent
} else if (indents > 0 && (indentStack.size() == 0 || indents != indentStack.get(0))) {
indentStack.push(indents);
tok = new Indent(String.valueOf(indents), lineno);
tok.setIndents(indents);
// newline
} else {
tok = new Newline(lineno);
}
this.pipeless = false;
return tok;
}
return null;
} }
|
public class class_name {
private Token indent() {
Matcher matcher;
String re;
if (indentRe != null) {
matcher = scanner.getMatcherForPattern(indentRe); // depends on control dependency: [if], data = [(indentRe]
} else {
// tabs
re = "^\\n(\\t*) *"; // depends on control dependency: [if], data = [none]
String indentType = "tabs";
matcher = scanner.getMatcherForPattern(re); // depends on control dependency: [if], data = [none]
// spaces
if (matcher.find(0) && matcher.group(1).length() == 0) {
re = "^\\n( *)"; // depends on control dependency: [if], data = [none]
indentType = "spaces"; // depends on control dependency: [if], data = [none]
matcher = scanner.getMatcherForPattern(re); // depends on control dependency: [if], data = [none]
}
// established
if (matcher.find(0) && matcher.group(1).length() > 0)
this.indentRe = re;
this.indentType = indentType; // depends on control dependency: [if], data = [none]
}
if (matcher.find(0) && matcher.groupCount() > 0) {
Token tok;
int indents = matcher.group(1).length();
lineno++; // depends on control dependency: [if], data = [none]
consume(indents + 1); // depends on control dependency: [if], data = [none]
if(scanner.getInput().length() > 0 && (scanner.getInput().charAt(0) == ' ' || scanner.getInput().charAt(0) == '\t')){
throw new JadeLexerException("Invalid indentation, you can use tabs or spaces but not both", filename, getLineno(), templateLoader);
}
// if (lastIndents <= 0 && indents > 0)
// lastIndents = indents;
// if ((indents > 0 && lastIndents > 0 && indents % lastIndents != 0) || scanner.isIntendantionViolated()) {
// throw new JadeLexerException("invalid indentation; expecting " + indents + " " + indentType, filename, getLineno(), templateLoader);
// }
// blank line
if (scanner.isBlankLine()) {
this.pipeless = false; // depends on control dependency: [if], data = [none]
return new Newline(lineno); // depends on control dependency: [if], data = [none]
}
// outdent
if (indentStack.size() > 0 && indents < indentStack.get(0)) {
while (indentStack.size() > 0 && indentStack.get(0) > indents) {
stash.add(new Outdent(lineno)); // depends on control dependency: [while], data = [none]
indentStack.poll(); // depends on control dependency: [while], data = [none]
}
tok = this.stash.pollLast(); // depends on control dependency: [if], data = [none]
// indent
} else if (indents > 0 && (indentStack.size() == 0 || indents != indentStack.get(0))) {
indentStack.push(indents); // depends on control dependency: [if], data = [none]
tok = new Indent(String.valueOf(indents), lineno); // depends on control dependency: [if], data = [none]
tok.setIndents(indents); // depends on control dependency: [if], data = [none]
// newline
} else {
tok = new Newline(lineno); // depends on control dependency: [if], data = [none]
}
this.pipeless = false; // depends on control dependency: [if], data = [none]
return tok; // depends on control dependency: [if], data = [none]
}
return null;
} }
|
public class class_name {
@Override
public final Set<IPersonAttributes> mergeResults(final Set<IPersonAttributes> toModify, final Set<IPersonAttributes> toConsider) {
Validate.notNull(toModify, "toModify cannot be null");
Validate.notNull(toConsider, "toConsider cannot be null");
//Convert the toModify Set into a Map to allow for easier lookups
final Map<String, IPersonAttributes> toModfyPeople = new LinkedHashMap<>();
for (final IPersonAttributes toModifyPerson : toModify) {
toModfyPeople.put(toModifyPerson.getName(), toModifyPerson);
}
//Merge in the toConsider people
for (final IPersonAttributes toConsiderPerson : toConsider) {
final String toConsiderName = toConsiderPerson.getName();
final IPersonAttributes toModifyPerson = toModfyPeople.get(toConsiderName);
//No matching toModify person, just add the new person
if (toModifyPerson == null) {
toModify.add(toConsiderPerson);
}
//Matching toModify person, merge their attributes
else {
final Map<String, List<Object>> toModifyAttributes = this.buildMutableAttributeMap(toModifyPerson.getAttributes());
final Map<String, List<Object>> mergedAttributes = this.mergePersonAttributes(toModifyAttributes, toConsiderPerson.getAttributes());
final NamedPersonImpl mergedPerson = new NamedPersonImpl(toConsiderName, mergedAttributes);
//Remove then re-add the mergedPerson entry
toModify.remove(mergedPerson);
toModify.add(mergedPerson);
}
}
return toModify;
} }
|
public class class_name {
@Override
public final Set<IPersonAttributes> mergeResults(final Set<IPersonAttributes> toModify, final Set<IPersonAttributes> toConsider) {
Validate.notNull(toModify, "toModify cannot be null");
Validate.notNull(toConsider, "toConsider cannot be null");
//Convert the toModify Set into a Map to allow for easier lookups
final Map<String, IPersonAttributes> toModfyPeople = new LinkedHashMap<>();
for (final IPersonAttributes toModifyPerson : toModify) {
toModfyPeople.put(toModifyPerson.getName(), toModifyPerson); // depends on control dependency: [for], data = [toModifyPerson]
}
//Merge in the toConsider people
for (final IPersonAttributes toConsiderPerson : toConsider) {
final String toConsiderName = toConsiderPerson.getName();
final IPersonAttributes toModifyPerson = toModfyPeople.get(toConsiderName);
//No matching toModify person, just add the new person
if (toModifyPerson == null) {
toModify.add(toConsiderPerson); // depends on control dependency: [if], data = [none]
}
//Matching toModify person, merge their attributes
else {
final Map<String, List<Object>> toModifyAttributes = this.buildMutableAttributeMap(toModifyPerson.getAttributes());
final Map<String, List<Object>> mergedAttributes = this.mergePersonAttributes(toModifyAttributes, toConsiderPerson.getAttributes());
final NamedPersonImpl mergedPerson = new NamedPersonImpl(toConsiderName, mergedAttributes);
//Remove then re-add the mergedPerson entry
toModify.remove(mergedPerson); // depends on control dependency: [if], data = [none]
toModify.add(mergedPerson); // depends on control dependency: [if], data = [none]
}
}
return toModify;
} }
|
public class class_name {
@Override
public EClass getIfcConstructionResourceType() {
if (ifcConstructionResourceTypeEClass == null) {
ifcConstructionResourceTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(135);
}
return ifcConstructionResourceTypeEClass;
} }
|
public class class_name {
@Override
public EClass getIfcConstructionResourceType() {
if (ifcConstructionResourceTypeEClass == null) {
ifcConstructionResourceTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(135);
// depends on control dependency: [if], data = [none]
}
return ifcConstructionResourceTypeEClass;
} }
|
public class class_name {
public static void independent( BufferedImage input ) {
// convert the BufferedImage into a Planar
Planar<GrayU8> image = ConvertBufferedImage.convertFromPlanar(input,null,true,GrayU8.class);
// declare the output blurred image
Planar<GrayU8> blurred = image.createSameShape();
// Apply Gaussian blur to each band in the image
for( int i = 0; i < image.getNumBands(); i++ ) {
// note that the generalized version of BlurImageOps is not being used, but the type
// specific version.
BlurImageOps.gaussian(image.getBand(i),blurred.getBand(i),-1,5,null);
}
// Declare the BufferedImage manually to ensure that the color bands have the same ordering on input
// and output
BufferedImage output = new BufferedImage(image.width,image.height,input.getType());
ConvertBufferedImage.convertTo(blurred, output,true);
gui.addImage(input,"Input");
gui.addImage(output,"Gaussian Blur");
} }
|
public class class_name {
public static void independent( BufferedImage input ) {
// convert the BufferedImage into a Planar
Planar<GrayU8> image = ConvertBufferedImage.convertFromPlanar(input,null,true,GrayU8.class);
// declare the output blurred image
Planar<GrayU8> blurred = image.createSameShape();
// Apply Gaussian blur to each band in the image
for( int i = 0; i < image.getNumBands(); i++ ) {
// note that the generalized version of BlurImageOps is not being used, but the type
// specific version.
BlurImageOps.gaussian(image.getBand(i),blurred.getBand(i),-1,5,null); // depends on control dependency: [for], data = [i]
}
// Declare the BufferedImage manually to ensure that the color bands have the same ordering on input
// and output
BufferedImage output = new BufferedImage(image.width,image.height,input.getType());
ConvertBufferedImage.convertTo(blurred, output,true);
gui.addImage(input,"Input");
gui.addImage(output,"Gaussian Blur");
} }
|
public class class_name {
@SuppressWarnings("rawtypes")
private static PluginDefinition parsePluginDefinition(final ClassLoader cl, final Element plugin) throws PluginConfigurationException {
// Check if the plugin definition is inside its own file
if (getAttributeValue(plugin, "definedIn", false) != null) {
StreamManager sm = new StreamManager();
String sFileName = getAttributeValue(plugin, "definedIn", false);
try {
InputStream in = sm.handle(cl.getResourceAsStream(sFileName));
return parseXmlPluginDefinition(cl, in);
} finally {
sm.closeAll();
}
}
String pluginClass = getAttributeValue(plugin, "class", true);
Class clazz;
try {
clazz = LoadedClassCache.getClass(cl, pluginClass);
if (!IPluginInterface.class.isAssignableFrom(clazz)) {
throw new PluginConfigurationException("Specified class '" + clazz.getName() + "' in the plugin.xml file does not implement "
+ "the IPluginInterface interface");
}
if (isAnnotated(clazz)) {
return loadFromPluginAnnotation(clazz);
}
} catch (ClassNotFoundException e) {
throw new PluginConfigurationException(e.getMessage(), e);
}
// The class is not annotated not has an external definition file...
// Loading from current xml file...
String sDescription = getAttributeValue(plugin, "description", false);
@SuppressWarnings("unchecked")
PluginDefinition pluginDef = new PluginDefinition(getAttributeValue(plugin, "name", true), sDescription, clazz);
parseCommandLine(pluginDef, plugin);
return pluginDef;
} }
|
public class class_name {
@SuppressWarnings("rawtypes")
private static PluginDefinition parsePluginDefinition(final ClassLoader cl, final Element plugin) throws PluginConfigurationException {
// Check if the plugin definition is inside its own file
if (getAttributeValue(plugin, "definedIn", false) != null) {
StreamManager sm = new StreamManager();
String sFileName = getAttributeValue(plugin, "definedIn", false);
try {
InputStream in = sm.handle(cl.getResourceAsStream(sFileName));
return parseXmlPluginDefinition(cl, in); // depends on control dependency: [try], data = [none]
} finally {
sm.closeAll();
}
}
String pluginClass = getAttributeValue(plugin, "class", true);
Class clazz;
try {
clazz = LoadedClassCache.getClass(cl, pluginClass);
if (!IPluginInterface.class.isAssignableFrom(clazz)) {
throw new PluginConfigurationException("Specified class '" + clazz.getName() + "' in the plugin.xml file does not implement "
+ "the IPluginInterface interface");
}
if (isAnnotated(clazz)) {
return loadFromPluginAnnotation(clazz); // depends on control dependency: [if], data = [none]
}
} catch (ClassNotFoundException e) {
throw new PluginConfigurationException(e.getMessage(), e);
}
// The class is not annotated not has an external definition file...
// Loading from current xml file...
String sDescription = getAttributeValue(plugin, "description", false);
@SuppressWarnings("unchecked")
PluginDefinition pluginDef = new PluginDefinition(getAttributeValue(plugin, "name", true), sDescription, clazz);
parseCommandLine(pluginDef, plugin);
return pluginDef;
} }
|
public class class_name {
public GetSessionResponse getSessionWithToken(String sessionId, Integer timeoutInMinute) {
GetSessionResponse getSessionResponse = getSession(sessionId);
if (timeoutInMinute == null) {
return getSessionResponse;
}
DateTime dateTime = new DateTime(DateTimeZone.UTC);
DateTime expireTime = dateTime.plusMinutes(timeoutInMinute);
DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
String expire = formatter.print(expireTime);
GetSecurityPolicyResponse getSecurityPolicyResponse = getSecurityPolicy(getSessionResponse.getSecurityPolicy());
Map<String, String> hlsUrls = new HashMap<String, String>();
Map<String, String> rtmpUrls = new HashMap<String, String>();
Map<String, String> flvUrls = new HashMap<String, String>();
if (getSecurityPolicyResponse.getAuth().getPlay()) {
if (getSessionResponse.getPlay().getHlsUrls() != null) {
for (Map.Entry<String, String> entry : getSessionResponse.getPlay().getHlsUrls().entrySet()) {
String line = entry.getKey();
String hlsUrl = entry.getValue();
if (hlsUrl != null) {
String hlsToken = null;
if (line.equals("L0")) {
hlsToken = LssUtils.hmacSha256(
String.format("/%s/live.m3u8;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
} else {
hlsToken = LssUtils.hmacSha256(
String.format("/%s-%s/live.m3u8;%s", sessionId, line, expire),
getSecurityPolicyResponse.getAuth().getKey());
}
if (hlsUrl.lastIndexOf('?') == -1) {
hlsUrl += String.format("?token=%s&expire=%s", hlsToken, expire);
} else {
hlsUrl += String.format("&token=%s&expire=%s", hlsToken, expire);
}
hlsUrls.put(line, hlsUrl);
}
}
getSessionResponse.getPlay().setHlsUrls(hlsUrls);
} else if (getSessionResponse.getPlay().getHlsUrl() != null) {
String hlsUrl = getSessionResponse.getPlay().getHlsUrl();
String hlsToken = LssUtils.hmacSha256(
String.format("/%s/live.m3u8;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
if (hlsUrl.lastIndexOf('?') == -1) {
hlsUrl += String.format("?token=%s&expire=%s", hlsToken, expire);
} else {
hlsUrl += String.format("&token=%s&expire=%s", hlsToken, expire);
}
getSessionResponse.getPlay().setHlsUrl(hlsUrl);
}
if (getSessionResponse.getPlay().getRtmpUrls() != null) {
for (Map.Entry<String, String> entry : getSessionResponse.getPlay().getRtmpUrls().entrySet()) {
String line = entry.getKey();
String rtmpUrl = entry.getValue();
if (rtmpUrl != null) {
String rtmpToken = LssUtils.hmacSha256(
String.format("%s;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
rtmpUrl += String.format("?token=%s&expire=%s", rtmpToken, expire);
}
rtmpUrls.put(line, rtmpUrl);
}
getSessionResponse.getPlay().setRtmpUrls(rtmpUrls);
} else if (getSessionResponse.getPlay().getRtmpUrl() != null) {
String rtmpUrl = getSessionResponse.getPlay().getRtmpUrl();
String rtmpToken = LssUtils.hmacSha256(
String.format("%s;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
rtmpUrl += String.format("?token=%s&expire=%s", rtmpToken, expire);
getSessionResponse.getPlay().setRtmpUrl(rtmpUrl);
}
if (getSessionResponse.getPlay().getFlvUrls() != null) {
for (Map.Entry<String, String> entry : getSessionResponse.getPlay().getFlvUrls().entrySet()) {
String line = entry.getKey();
String flvUrl = entry.getValue();
if (flvUrl != null) {
String flvToken = LssUtils.hmacSha256(
String.format("%s;%s", flvUrl, expire),
getSecurityPolicyResponse.getAuth().getKey());
flvUrl += String.format("?token=%s&expire=%s", flvToken, expire);
}
flvUrls.put(line, flvUrl);
}
getSessionResponse.getPlay().setFlvUrls(flvUrls);
} else if (getSessionResponse.getPlay().getFlvUrl() != null) {
String flvUrl = getSessionResponse.getPlay().getFlvUrl();
String flvToken = LssUtils.hmacSha256(
String.format("%s;%s", flvUrl, expire),
getSecurityPolicyResponse.getAuth().getKey());
flvUrl += String.format("?token=%s&expire=%s", flvToken, expire);
getSessionResponse.getPlay().setFlvUrl(flvUrl);
}
}
if (getSecurityPolicyResponse.getAuth().getPush()) {
String pushUrl = getSessionResponse.getPublish().getPushUrl();
String pushToken = LssUtils.hmacSha256(
String.format("%s;%s", getSessionResponse.getPublish().getPushStream(), expire),
getSecurityPolicyResponse.getAuth().getKey());
pushUrl += String.format("?token=%s&expire=%s", pushToken, expire);
getSessionResponse.getPublish().setPushUrl(pushUrl);
}
return getSessionResponse;
} }
|
public class class_name {
public GetSessionResponse getSessionWithToken(String sessionId, Integer timeoutInMinute) {
GetSessionResponse getSessionResponse = getSession(sessionId);
if (timeoutInMinute == null) {
return getSessionResponse; // depends on control dependency: [if], data = [none]
}
DateTime dateTime = new DateTime(DateTimeZone.UTC);
DateTime expireTime = dateTime.plusMinutes(timeoutInMinute);
DateTimeFormatter formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss'Z'");
String expire = formatter.print(expireTime);
GetSecurityPolicyResponse getSecurityPolicyResponse = getSecurityPolicy(getSessionResponse.getSecurityPolicy());
Map<String, String> hlsUrls = new HashMap<String, String>();
Map<String, String> rtmpUrls = new HashMap<String, String>();
Map<String, String> flvUrls = new HashMap<String, String>();
if (getSecurityPolicyResponse.getAuth().getPlay()) {
if (getSessionResponse.getPlay().getHlsUrls() != null) {
for (Map.Entry<String, String> entry : getSessionResponse.getPlay().getHlsUrls().entrySet()) {
String line = entry.getKey();
String hlsUrl = entry.getValue();
if (hlsUrl != null) {
String hlsToken = null;
if (line.equals("L0")) {
hlsToken = LssUtils.hmacSha256(
String.format("/%s/live.m3u8;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey()); // depends on control dependency: [if], data = [none]
} else {
hlsToken = LssUtils.hmacSha256(
String.format("/%s-%s/live.m3u8;%s", sessionId, line, expire),
getSecurityPolicyResponse.getAuth().getKey()); // depends on control dependency: [if], data = [none]
}
if (hlsUrl.lastIndexOf('?') == -1) {
hlsUrl += String.format("?token=%s&expire=%s", hlsToken, expire); // depends on control dependency: [if], data = [none]
} else {
hlsUrl += String.format("&token=%s&expire=%s", hlsToken, expire); // depends on control dependency: [if], data = [none]
}
hlsUrls.put(line, hlsUrl); // depends on control dependency: [if], data = [none]
}
}
getSessionResponse.getPlay().setHlsUrls(hlsUrls); // depends on control dependency: [if], data = [none]
} else if (getSessionResponse.getPlay().getHlsUrl() != null) {
String hlsUrl = getSessionResponse.getPlay().getHlsUrl();
String hlsToken = LssUtils.hmacSha256(
String.format("/%s/live.m3u8;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
if (hlsUrl.lastIndexOf('?') == -1) {
hlsUrl += String.format("?token=%s&expire=%s", hlsToken, expire); // depends on control dependency: [if], data = [none]
} else {
hlsUrl += String.format("&token=%s&expire=%s", hlsToken, expire); // depends on control dependency: [if], data = [none]
}
getSessionResponse.getPlay().setHlsUrl(hlsUrl); // depends on control dependency: [if], data = [none]
}
if (getSessionResponse.getPlay().getRtmpUrls() != null) {
for (Map.Entry<String, String> entry : getSessionResponse.getPlay().getRtmpUrls().entrySet()) {
String line = entry.getKey();
String rtmpUrl = entry.getValue();
if (rtmpUrl != null) {
String rtmpToken = LssUtils.hmacSha256(
String.format("%s;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
rtmpUrl += String.format("?token=%s&expire=%s", rtmpToken, expire); // depends on control dependency: [if], data = [none]
}
rtmpUrls.put(line, rtmpUrl); // depends on control dependency: [for], data = [none]
}
getSessionResponse.getPlay().setRtmpUrls(rtmpUrls); // depends on control dependency: [if], data = [none]
} else if (getSessionResponse.getPlay().getRtmpUrl() != null) {
String rtmpUrl = getSessionResponse.getPlay().getRtmpUrl();
String rtmpToken = LssUtils.hmacSha256(
String.format("%s;%s", sessionId, expire),
getSecurityPolicyResponse.getAuth().getKey());
rtmpUrl += String.format("?token=%s&expire=%s", rtmpToken, expire); // depends on control dependency: [if], data = [none]
getSessionResponse.getPlay().setRtmpUrl(rtmpUrl); // depends on control dependency: [if], data = [none]
}
if (getSessionResponse.getPlay().getFlvUrls() != null) {
for (Map.Entry<String, String> entry : getSessionResponse.getPlay().getFlvUrls().entrySet()) {
String line = entry.getKey();
String flvUrl = entry.getValue();
if (flvUrl != null) {
String flvToken = LssUtils.hmacSha256(
String.format("%s;%s", flvUrl, expire),
getSecurityPolicyResponse.getAuth().getKey());
flvUrl += String.format("?token=%s&expire=%s", flvToken, expire); // depends on control dependency: [if], data = [none]
}
flvUrls.put(line, flvUrl); // depends on control dependency: [for], data = [none]
}
getSessionResponse.getPlay().setFlvUrls(flvUrls); // depends on control dependency: [if], data = [none]
} else if (getSessionResponse.getPlay().getFlvUrl() != null) {
String flvUrl = getSessionResponse.getPlay().getFlvUrl();
String flvToken = LssUtils.hmacSha256(
String.format("%s;%s", flvUrl, expire),
getSecurityPolicyResponse.getAuth().getKey());
flvUrl += String.format("?token=%s&expire=%s", flvToken, expire); // depends on control dependency: [if], data = [none]
getSessionResponse.getPlay().setFlvUrl(flvUrl); // depends on control dependency: [if], data = [none]
}
}
if (getSecurityPolicyResponse.getAuth().getPush()) {
String pushUrl = getSessionResponse.getPublish().getPushUrl();
String pushToken = LssUtils.hmacSha256(
String.format("%s;%s", getSessionResponse.getPublish().getPushStream(), expire),
getSecurityPolicyResponse.getAuth().getKey());
pushUrl += String.format("?token=%s&expire=%s", pushToken, expire); // depends on control dependency: [if], data = [none]
getSessionResponse.getPublish().setPushUrl(pushUrl); // depends on control dependency: [if], data = [none]
}
return getSessionResponse;
} }
|
public class class_name {
@Check
public void checkFieldNameShadowing(SarlField field) {
if (!isIgnored(VARIABLE_NAME_SHADOWING)
&& !Utils.isHiddenMember(field.getName())) {
final JvmField inferredField = this.associations.getJvmField(field);
final Map<String, JvmField> inheritedFields = new TreeMap<>();
Utils.populateInheritanceContext(
inferredField.getDeclaringType(),
null, null,
inheritedFields,
null, null,
this.sarlActionSignatures);
final JvmField inheritedField = inheritedFields.get(field.getName());
if (inheritedField != null) {
int nameIndex = 0;
String newName = field.getName() + nameIndex;
while (inheritedFields.containsKey(newName)) {
++nameIndex;
newName = field.getName() + nameIndex;
}
addIssue(MessageFormat.format(
Messages.SARLValidator_42,
field.getName(),
inferredField.getDeclaringType().getQualifiedName(),
inheritedField.getQualifiedName()),
field,
XTEND_FIELD__NAME,
ValidationMessageAcceptor.INSIGNIFICANT_INDEX,
VARIABLE_NAME_SHADOWING,
newName);
}
}
} }
|
public class class_name {
@Check
public void checkFieldNameShadowing(SarlField field) {
if (!isIgnored(VARIABLE_NAME_SHADOWING)
&& !Utils.isHiddenMember(field.getName())) {
final JvmField inferredField = this.associations.getJvmField(field);
final Map<String, JvmField> inheritedFields = new TreeMap<>();
Utils.populateInheritanceContext(
inferredField.getDeclaringType(),
null, null,
inheritedFields,
null, null,
this.sarlActionSignatures); // depends on control dependency: [if], data = [none]
final JvmField inheritedField = inheritedFields.get(field.getName());
if (inheritedField != null) {
int nameIndex = 0;
String newName = field.getName() + nameIndex;
while (inheritedFields.containsKey(newName)) {
++nameIndex; // depends on control dependency: [while], data = [none]
newName = field.getName() + nameIndex; // depends on control dependency: [while], data = [none]
}
addIssue(MessageFormat.format(
Messages.SARLValidator_42,
field.getName(),
inferredField.getDeclaringType().getQualifiedName(),
inheritedField.getQualifiedName()),
field,
XTEND_FIELD__NAME,
ValidationMessageAcceptor.INSIGNIFICANT_INDEX,
VARIABLE_NAME_SHADOWING,
newName); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
public static AccrueType getInstance(String type, Locale locale)
{
AccrueType result = null;
String[] typeNames = LocaleData.getStringArray(locale, LocaleData.ACCRUE_TYPES);
for (int loop = 0; loop < typeNames.length; loop++)
{
if (typeNames[loop].equalsIgnoreCase(type) == true)
{
result = AccrueType.getInstance(loop + 1);
break;
}
}
if (result == null)
{
result = AccrueType.PRORATED;
}
return (result);
} }
|
public class class_name {
public static AccrueType getInstance(String type, Locale locale)
{
AccrueType result = null;
String[] typeNames = LocaleData.getStringArray(locale, LocaleData.ACCRUE_TYPES);
for (int loop = 0; loop < typeNames.length; loop++)
{
if (typeNames[loop].equalsIgnoreCase(type) == true)
{
result = AccrueType.getInstance(loop + 1); // depends on control dependency: [if], data = [none]
break;
}
}
if (result == null)
{
result = AccrueType.PRORATED; // depends on control dependency: [if], data = [none]
}
return (result);
} }
|
public class class_name {
@Override
protected Tree determineNonTrivialHead(Tree t, Tree parent) {
String motherCat = tlp.basicCategory(t.label().value());
if (DEBUG) {
System.err.println("At " + motherCat + ", my parent is " + parent);
}
// do VPs with auxiliary as special case
if ((motherCat.equals("VP") || motherCat.equals("SQ") || motherCat.equals("SINV"))) {
Tree[] kids = t.children();
// try to find if there is an auxiliary verb
if (DEBUG) {
System.err.println("Semantic head finder: at VP");
System.err.println("Class is " + t.getClass().getName());
t.pennPrint(System.err);
//System.err.println("hasVerbalAuxiliary = " + hasVerbalAuxiliary(kids, verbalAuxiliaries));
}
// looks for auxiliaries
if (hasVerbalAuxiliary(kids, verbalAuxiliaries) || hasPassiveProgressiveAuxiliary(kids, passiveAuxiliaries)) {
// String[] how = new String[] {"left", "VP", "ADJP", "NP"};
// Including NP etc seems okay for copular sentences but is
// problematic for other auxiliaries, like 'he has an answer'
// But maybe doing ADJP is fine!
String[] how = { "left", "VP", "ADJP" };
Tree pti = traverseLocate(kids, how, false);
if (DEBUG) {
System.err.println("Determined head (case 1) for " + t.value() + " is: " + pti);
}
if (pti != null) {
return pti;
} else {
// System.err.println("------");
// System.err.println("SemanticHeadFinder failed to reassign head for");
// t.pennPrint(System.err);
// System.err.println("------");
}
}
// looks for copular verbs
if (hasVerbalAuxiliary(kids, copulars) && ! isExistential(t, parent) && ! isWHQ(t, parent)) {
String[] how;
if (motherCat.equals("SQ")) {
how = new String[]{"right", "VP", "ADJP", "NP", "WHADJP", "WHNP"};
} else {
how = new String[]{"left", "VP", "ADJP", "NP", "WHADJP", "WHNP"};
}
Tree pti = traverseLocate(kids, how, false);
// don't allow a temporal to become head
if (pti != null && pti.label() != null && pti.label().value().contains("-TMP")) {
pti = null;
}
// In SQ, only allow an NP to become head if there is another one to the left (then it's probably predicative)
if (motherCat.equals("SQ") && pti != null && pti.label() != null && pti.label().value().startsWith("NP")) {
boolean foundAnotherNp = false;
for (Tree kid : kids) {
if (kid == pti) {
break;
} else if (kid.label() != null && kid.label().value().startsWith("NP")) {
foundAnotherNp = true;
break;
}
}
if ( ! foundAnotherNp) {
pti = null;
}
}
if (DEBUG) {
System.err.println("Determined head (case 2) for " + t.value() + " is: " + pti);
}
if (pti != null) {
return pti;
} else {
if (DEBUG) {
System.err.println("------");
System.err.println("SemanticHeadFinder failed to reassign head for");
t.pennPrint(System.err);
System.err.println("------");
}
}
}
}
Tree hd = super.determineNonTrivialHead(t, parent);
// Heuristically repair punctuation heads
Tree[] hdChildren = hd.children();
if (hdChildren != null && hdChildren.length > 0 &&
hdChildren[0].isLeaf()) {
if (tlp.isPunctuationWord(hdChildren[0].label().value())) {
Tree[] tChildren = t.children();
if (DEBUG) {
System.err.printf("head is punct: %s\n", hdChildren[0].label());
}
for (int i = tChildren.length - 1; i >= 0; i--) {
if (!tlp.isPunctuationWord(tChildren[i].children()[0].label().value())) {
hd = tChildren[i];
if (DEBUG) {
System.err.printf("New head: %s %s", hd.label(), hd.children()[0].label());
}
break;
}
}
}
}
if (DEBUG) {
System.err.println("Determined head (case 3) for " + t.value() + " is: " + hd);
}
return hd;
} }
|
public class class_name {
@Override
protected Tree determineNonTrivialHead(Tree t, Tree parent) {
String motherCat = tlp.basicCategory(t.label().value());
if (DEBUG) {
System.err.println("At " + motherCat + ", my parent is " + parent);
// depends on control dependency: [if], data = [none]
}
// do VPs with auxiliary as special case
if ((motherCat.equals("VP") || motherCat.equals("SQ") || motherCat.equals("SINV"))) {
Tree[] kids = t.children();
// try to find if there is an auxiliary verb
if (DEBUG) {
System.err.println("Semantic head finder: at VP");
// depends on control dependency: [if], data = [none]
System.err.println("Class is " + t.getClass().getName());
// depends on control dependency: [if], data = [none]
t.pennPrint(System.err);
// depends on control dependency: [if], data = [none]
//System.err.println("hasVerbalAuxiliary = " + hasVerbalAuxiliary(kids, verbalAuxiliaries));
}
// looks for auxiliaries
if (hasVerbalAuxiliary(kids, verbalAuxiliaries) || hasPassiveProgressiveAuxiliary(kids, passiveAuxiliaries)) {
// String[] how = new String[] {"left", "VP", "ADJP", "NP"};
// Including NP etc seems okay for copular sentences but is
// problematic for other auxiliaries, like 'he has an answer'
// But maybe doing ADJP is fine!
String[] how = { "left", "VP", "ADJP" };
Tree pti = traverseLocate(kids, how, false);
if (DEBUG) {
System.err.println("Determined head (case 1) for " + t.value() + " is: " + pti);
// depends on control dependency: [if], data = [none]
}
if (pti != null) {
return pti;
// depends on control dependency: [if], data = [none]
} else {
// System.err.println("------");
// System.err.println("SemanticHeadFinder failed to reassign head for");
// t.pennPrint(System.err);
// System.err.println("------");
}
}
// looks for copular verbs
if (hasVerbalAuxiliary(kids, copulars) && ! isExistential(t, parent) && ! isWHQ(t, parent)) {
String[] how;
if (motherCat.equals("SQ")) {
how = new String[]{"right", "VP", "ADJP", "NP", "WHADJP", "WHNP"};
// depends on control dependency: [if], data = [none]
} else {
how = new String[]{"left", "VP", "ADJP", "NP", "WHADJP", "WHNP"};
// depends on control dependency: [if], data = [none]
}
Tree pti = traverseLocate(kids, how, false);
// don't allow a temporal to become head
if (pti != null && pti.label() != null && pti.label().value().contains("-TMP")) {
pti = null;
// depends on control dependency: [if], data = [none]
}
// In SQ, only allow an NP to become head if there is another one to the left (then it's probably predicative)
if (motherCat.equals("SQ") && pti != null && pti.label() != null && pti.label().value().startsWith("NP")) {
boolean foundAnotherNp = false;
for (Tree kid : kids) {
if (kid == pti) {
break;
} else if (kid.label() != null && kid.label().value().startsWith("NP")) {
foundAnotherNp = true;
// depends on control dependency: [if], data = [none]
break;
}
}
if ( ! foundAnotherNp) {
pti = null;
// depends on control dependency: [if], data = [none]
}
}
if (DEBUG) {
System.err.println("Determined head (case 2) for " + t.value() + " is: " + pti);
// depends on control dependency: [if], data = [none]
}
if (pti != null) {
return pti;
// depends on control dependency: [if], data = [none]
} else {
if (DEBUG) {
System.err.println("------");
// depends on control dependency: [if], data = [none]
System.err.println("SemanticHeadFinder failed to reassign head for");
// depends on control dependency: [if], data = [none]
t.pennPrint(System.err);
// depends on control dependency: [if], data = [none]
System.err.println("------");
// depends on control dependency: [if], data = [none]
}
}
}
}
Tree hd = super.determineNonTrivialHead(t, parent);
// Heuristically repair punctuation heads
Tree[] hdChildren = hd.children();
if (hdChildren != null && hdChildren.length > 0 &&
hdChildren[0].isLeaf()) {
if (tlp.isPunctuationWord(hdChildren[0].label().value())) {
Tree[] tChildren = t.children();
if (DEBUG) {
System.err.printf("head is punct: %s\n", hdChildren[0].label());
// depends on control dependency: [if], data = [none]
}
for (int i = tChildren.length - 1; i >= 0; i--) {
if (!tlp.isPunctuationWord(tChildren[i].children()[0].label().value())) {
hd = tChildren[i];
// depends on control dependency: [if], data = [none]
if (DEBUG) {
System.err.printf("New head: %s %s", hd.label(), hd.children()[0].label());
// depends on control dependency: [if], data = [none]
}
break;
}
}
}
}
if (DEBUG) {
System.err.println("Determined head (case 3) for " + t.value() + " is: " + hd);
// depends on control dependency: [if], data = [none]
}
return hd;
} }
|
public class class_name {
private void configureServletContext(final ServletContext servletContext) {
servletContext.addListener(jodd.servlet.RequestContextListener.class);
if (decoraEnabled) {
final FilterRegistration filter = servletContext.addFilter("decora", jodd.decora.DecoraServletFilter.class);
filter.addMappingForUrlPatterns(null, true, contextPath);
}
final FilterRegistration filter = servletContext.addFilter("madvoc", jodd.madvoc.MadvocServletFilter.class);
filter.addMappingForUrlPatterns(madvocDispatcherTypes, true, contextPath);
} }
|
public class class_name {
private void configureServletContext(final ServletContext servletContext) {
servletContext.addListener(jodd.servlet.RequestContextListener.class);
if (decoraEnabled) {
final FilterRegistration filter = servletContext.addFilter("decora", jodd.decora.DecoraServletFilter.class);
filter.addMappingForUrlPatterns(null, true, contextPath); // depends on control dependency: [if], data = [none]
}
final FilterRegistration filter = servletContext.addFilter("madvoc", jodd.madvoc.MadvocServletFilter.class);
filter.addMappingForUrlPatterns(madvocDispatcherTypes, true, contextPath);
} }
|
public class class_name {
protected final Object getEventSessionMutex(HttpSession session) {
synchronized (WebUtils.getSessionMutex(session)) {
SerializableObject mutex =
(SerializableObject) session.getAttribute(EVENT_SESSION_MUTEX);
if (mutex == null) {
mutex = new SerializableObject();
session.setAttribute(EVENT_SESSION_MUTEX, mutex);
}
return mutex;
}
} }
|
public class class_name {
protected final Object getEventSessionMutex(HttpSession session) {
synchronized (WebUtils.getSessionMutex(session)) {
SerializableObject mutex =
(SerializableObject) session.getAttribute(EVENT_SESSION_MUTEX);
if (mutex == null) {
mutex = new SerializableObject(); // depends on control dependency: [if], data = [none]
session.setAttribute(EVENT_SESSION_MUTEX, mutex); // depends on control dependency: [if], data = [none]
}
return mutex;
}
} }
|
public class class_name {
@Override
protected NoSqlSession loadSession(final String clusterId)
{
log.debug("loadSession: loading: id=" + clusterId);
ISerializableSession data = getKey(clusterId);
log.debug("loadSession: loaded: id=" + clusterId + ", data=" + data);
if (data == null)
{
return null;
}
boolean valid = data.isValid();
if (!valid)
{
log.debug("loadSession: id=" + clusterId + ", data=" + data + " has been invalidated.");
return null;
}
if (!clusterId.equals(data.getId()))
{
log.warn("loadSession: invalid id (expected:" + clusterId + ", got:" + data.getId() + ")");
return null;
}
synchronized (_cookieDomain)
{
if (_cookieDomain != null && !data.getDomain().equals("*") && !_cookieDomain.equals(data.getDomain()))
{
log.warn("loadSession: invalid cookie domain (expected:" + _cookieDomain + ", got:" + data.getDomain()
+ ")");
return null;
}
}
synchronized (_cookiePath)
{
if (_cookiePath != null && !data.getPath().equals("*") && !_cookiePath.equals(data.getPath()))
{
log.warn("loadSession: invalid cookie path (expected:" + _cookiePath + ", got:" + data.getPath() + ")");
return null;
}
}
try
{
long version = data.getVersion();
long created = data.getCreationTime();
long accessed = data.getAccessed();
SmarterNoSqlSession session = new SmarterNoSqlSession(this, created, accessed, clusterId, version);
// get the attributes for the context
Enumeration<String> attrs = data.getAttributeNames();
// log.debug("attrs: " + Collections.list(attrs));
if (attrs != null)
{
while (attrs.hasMoreElements())
{
String name = attrs.nextElement();
Object value = data.getAttribute(name);
session.initializeAttribute(name, value);
session.bindValue(name, value);
}
}
session.didActivate();
return session;
}
catch (Exception e)
{
log.warn(e);
}
return null;
} }
|
public class class_name {
@Override
protected NoSqlSession loadSession(final String clusterId)
{
log.debug("loadSession: loading: id=" + clusterId);
ISerializableSession data = getKey(clusterId);
log.debug("loadSession: loaded: id=" + clusterId + ", data=" + data);
if (data == null)
{
return null; // depends on control dependency: [if], data = [none]
}
boolean valid = data.isValid();
if (!valid)
{
log.debug("loadSession: id=" + clusterId + ", data=" + data + " has been invalidated."); // depends on control dependency: [if], data = [none]
return null; // depends on control dependency: [if], data = [none]
}
if (!clusterId.equals(data.getId()))
{
log.warn("loadSession: invalid id (expected:" + clusterId + ", got:" + data.getId() + ")");
return null;
}
synchronized (_cookieDomain)
{
if (_cookieDomain != null && !data.getDomain().equals("*") && !_cookieDomain.equals(data.getDomain()))
{
log.warn("loadSession: invalid cookie domain (expected:" + _cookieDomain + ", got:" + data.getDomain()
+ ")");
return null;
}
}
synchronized (_cookiePath)
{
if (_cookiePath != null && !data.getPath().equals("*") && !_cookiePath.equals(data.getPath()))
{
log.warn("loadSession: invalid cookie path (expected:" + _cookiePath + ", got:" + data.getPath() + ")");
return null;
}
}
try
{
long version = data.getVersion();
long created = data.getCreationTime();
long accessed = data.getAccessed();
SmarterNoSqlSession session = new SmarterNoSqlSession(this, created, accessed, clusterId, version);
// get the attributes for the context
Enumeration<String> attrs = data.getAttributeNames();
// log.debug("attrs: " + Collections.list(attrs));
if (attrs != null)
{
while (attrs.hasMoreElements())
{
String name = attrs.nextElement();
Object value = data.getAttribute(name);
session.initializeAttribute(name, value);
session.bindValue(name, value);
}
}
session.didActivate();
return session;
}
catch (Exception e)
{
log.warn(e);
}
return null;
} }
|
public class class_name {
public static void writeFile(File file, long val) throws IOException {
AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
try {
fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
fos.write('\n');
fos.close();
fos = null;
} finally {
if (fos != null) {
fos.abort();
}
}
} }
|
public class class_name {
public static void writeFile(File file, long val) throws IOException {
AtomicFileOutputStream fos = new AtomicFileOutputStream(file);
try {
fos.write(String.valueOf(val).getBytes(Charsets.UTF_8));
fos.write('\n');
fos.close();
fos = null;
} finally {
if (fos != null) {
fos.abort(); // depends on control dependency: [if], data = [none]
}
}
} }
|
public class class_name {
@GET
@Produces(MediaType.APPLICATION_XML)
public Response getAgreements(
@QueryParam("consumerId") String consumerId,
@QueryParam("providerId") String providerId,
@QueryParam("active") BooleanParam active) {
logger.debug("StartOf getAgreements - REQUEST for /agreements");
try{
AgreementHelper agreementRestService = getAgreementHelper();
String serializedAgreement = agreementRestService.getAgreements(consumerId, providerId, BooleanParam.getValue(active));
Response result = buildResponse(200, serializedAgreement);
logger.debug("EndOf getAgreements");
return result;
} catch (HelperException e) {
logger.info("getAgreements exception:"+e.getMessage());
return buildResponse(e);
}
} }
|
public class class_name {
@GET
@Produces(MediaType.APPLICATION_XML)
public Response getAgreements(
@QueryParam("consumerId") String consumerId,
@QueryParam("providerId") String providerId,
@QueryParam("active") BooleanParam active) {
logger.debug("StartOf getAgreements - REQUEST for /agreements");
try{
AgreementHelper agreementRestService = getAgreementHelper();
String serializedAgreement = agreementRestService.getAgreements(consumerId, providerId, BooleanParam.getValue(active));
Response result = buildResponse(200, serializedAgreement);
logger.debug("EndOf getAgreements"); // depends on control dependency: [try], data = [none]
return result; // depends on control dependency: [try], data = [none]
} catch (HelperException e) {
logger.info("getAgreements exception:"+e.getMessage());
return buildResponse(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
public NatGateway withNatGatewayAddresses(NatGatewayAddress... natGatewayAddresses) {
if (this.natGatewayAddresses == null) {
setNatGatewayAddresses(new com.amazonaws.internal.SdkInternalList<NatGatewayAddress>(natGatewayAddresses.length));
}
for (NatGatewayAddress ele : natGatewayAddresses) {
this.natGatewayAddresses.add(ele);
}
return this;
} }
|
public class class_name {
public NatGateway withNatGatewayAddresses(NatGatewayAddress... natGatewayAddresses) {
if (this.natGatewayAddresses == null) {
setNatGatewayAddresses(new com.amazonaws.internal.SdkInternalList<NatGatewayAddress>(natGatewayAddresses.length)); // depends on control dependency: [if], data = [none]
}
for (NatGatewayAddress ele : natGatewayAddresses) {
this.natGatewayAddresses.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} }
|
public class class_name {
public static String[] split (String source, String sep)
{
// handle the special case of a zero-component source
if (isBlank(source)) {
return new String[0];
}
int tcount = 0, tpos = -1, tstart = 0;
// count up the number of tokens
while ((tpos = source.indexOf(sep, tpos+1)) != -1) {
tcount++;
}
String[] tokens = new String[tcount+1];
tpos = -1; tcount = 0;
// do the split
while ((tpos = source.indexOf(sep, tpos+1)) != -1) {
tokens[tcount] = source.substring(tstart, tpos);
tstart = tpos+1;
tcount++;
}
// grab the last token
tokens[tcount] = source.substring(tstart);
return tokens;
} }
|
public class class_name {
public static String[] split (String source, String sep)
{
// handle the special case of a zero-component source
if (isBlank(source)) {
return new String[0]; // depends on control dependency: [if], data = [none]
}
int tcount = 0, tpos = -1, tstart = 0;
// count up the number of tokens
while ((tpos = source.indexOf(sep, tpos+1)) != -1) {
tcount++; // depends on control dependency: [while], data = [none]
}
String[] tokens = new String[tcount+1];
tpos = -1; tcount = 0;
// do the split
while ((tpos = source.indexOf(sep, tpos+1)) != -1) {
tokens[tcount] = source.substring(tstart, tpos); // depends on control dependency: [while], data = [none]
tstart = tpos+1; // depends on control dependency: [while], data = [none]
tcount++; // depends on control dependency: [while], data = [none]
}
// grab the last token
tokens[tcount] = source.substring(tstart);
return tokens;
} }
|
public class class_name {
@Override
public void visitMethod(Method obj) {
if (getMethod().isSynthetic()) {
return;
}
methodTransType = getTransactionalType(obj);
if ((methodTransType != TransactionalType.NONE) && !obj.isPublic()) {
bugReporter
.reportBug(new BugInstance(this, BugType.JPAI_TRANSACTION_ON_NON_PUBLIC_METHOD.name(), NORMAL_PRIORITY).addClass(this).addMethod(cls, obj));
}
if ((methodTransType == TransactionalType.WRITE) && (runtimeExceptionClass != null)) {
try {
Set<JavaClass> annotatedRollBackExceptions = getAnnotatedRollbackExceptions(obj);
Set<JavaClass> declaredExceptions = getDeclaredExceptions(obj);
reportExceptionMismatch(obj, annotatedRollBackExceptions, declaredExceptions, false, BugType.JPAI_NON_SPECIFIED_TRANSACTION_EXCEPTION_HANDLING);
reportExceptionMismatch(obj, declaredExceptions, annotatedRollBackExceptions, true, BugType.JPAI_UNNECESSARY_TRANSACTION_EXCEPTION_HANDLING);
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
}
}
super.visitMethod(obj);
} }
|
public class class_name {
@Override
public void visitMethod(Method obj) {
if (getMethod().isSynthetic()) {
return; // depends on control dependency: [if], data = [none]
}
methodTransType = getTransactionalType(obj);
if ((methodTransType != TransactionalType.NONE) && !obj.isPublic()) {
bugReporter
.reportBug(new BugInstance(this, BugType.JPAI_TRANSACTION_ON_NON_PUBLIC_METHOD.name(), NORMAL_PRIORITY).addClass(this).addMethod(cls, obj)); // depends on control dependency: [if], data = [none]
}
if ((methodTransType == TransactionalType.WRITE) && (runtimeExceptionClass != null)) {
try {
Set<JavaClass> annotatedRollBackExceptions = getAnnotatedRollbackExceptions(obj);
Set<JavaClass> declaredExceptions = getDeclaredExceptions(obj);
reportExceptionMismatch(obj, annotatedRollBackExceptions, declaredExceptions, false, BugType.JPAI_NON_SPECIFIED_TRANSACTION_EXCEPTION_HANDLING); // depends on control dependency: [try], data = [none]
reportExceptionMismatch(obj, declaredExceptions, annotatedRollBackExceptions, true, BugType.JPAI_UNNECESSARY_TRANSACTION_EXCEPTION_HANDLING); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException cnfe) {
bugReporter.reportMissingClass(cnfe);
} // depends on control dependency: [catch], data = [none]
}
super.visitMethod(obj);
} }
|
public class class_name {
@Override
public boolean remove(Object object) {
for (int offset = 0; offset < this.size; offset++) {
if (this.storage[offset] == object) {
System.arraycopy(this.storage, offset + 1, this.storage, offset, this.size - (offset + 1));
this.storage[--this.size] = null;
return true;
}
}
return false;
} }
|
public class class_name {
@Override
public boolean remove(Object object) {
for (int offset = 0; offset < this.size; offset++) {
if (this.storage[offset] == object) {
System.arraycopy(this.storage, offset + 1, this.storage, offset, this.size - (offset + 1)); // depends on control dependency: [if], data = [none]
this.storage[--this.size] = null; // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
}
return false;
} }
|
public class class_name {
public void setOtaUpdateFiles(java.util.Collection<OTAUpdateFile> otaUpdateFiles) {
if (otaUpdateFiles == null) {
this.otaUpdateFiles = null;
return;
}
this.otaUpdateFiles = new java.util.ArrayList<OTAUpdateFile>(otaUpdateFiles);
} }
|
public class class_name {
public void setOtaUpdateFiles(java.util.Collection<OTAUpdateFile> otaUpdateFiles) {
if (otaUpdateFiles == null) {
this.otaUpdateFiles = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.otaUpdateFiles = new java.util.ArrayList<OTAUpdateFile>(otaUpdateFiles);
} }
|
public class class_name {
public void marshall(ListOutgoingTypedLinksRequest listOutgoingTypedLinksRequest, ProtocolMarshaller protocolMarshaller) {
if (listOutgoingTypedLinksRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getDirectoryArn(), DIRECTORYARN_BINDING);
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getObjectReference(), OBJECTREFERENCE_BINDING);
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getFilterAttributeRanges(), FILTERATTRIBUTERANGES_BINDING);
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getFilterTypedLink(), FILTERTYPEDLINK_BINDING);
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getConsistencyLevel(), CONSISTENCYLEVEL_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} }
|
public class class_name {
public void marshall(ListOutgoingTypedLinksRequest listOutgoingTypedLinksRequest, ProtocolMarshaller protocolMarshaller) {
if (listOutgoingTypedLinksRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getDirectoryArn(), DIRECTORYARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getObjectReference(), OBJECTREFERENCE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getFilterAttributeRanges(), FILTERATTRIBUTERANGES_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getFilterTypedLink(), FILTERTYPEDLINK_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listOutgoingTypedLinksRequest.getConsistencyLevel(), CONSISTENCYLEVEL_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
private void compactAndRemove(final E[] buffer, final int mask, int removeHashIndex) {
// remove(9a): [9a,9b,10a,9c,10b,11a,null] -> [9b,9c,10a,10b,null,11a,null]
removeHashIndex = removeHashIndex & mask;
int j = removeHashIndex;
while(true) {
int k;
// skip elements which belong where they are
do {
// j := (j+1) modulo num_slots
j = (j + 1) & mask;
// if slot[j] is unoccupied exit
if (buffer[j] == null) {
// delete last duplicate slot
buffer[removeHashIndex] = null;
return;
}
// k := hash(slot[j].key) modulo num_slots
k = System.identityHashCode(buffer[j]) & mask;
// determine if k lies cyclically in [i,j]
// | i.k.j |
// |....j i.k.| or |.k..j i...|
}
while ( (removeHashIndex <= j) ?
((removeHashIndex < k) && (k <= j)) :
((removeHashIndex < k) || (k <= j)) );
// slot[removeHashIndex] := slot[j]
buffer[removeHashIndex] = buffer[j];
// removeHashIndex := j
removeHashIndex = j;
}
} }
|
public class class_name {
private void compactAndRemove(final E[] buffer, final int mask, int removeHashIndex) {
// remove(9a): [9a,9b,10a,9c,10b,11a,null] -> [9b,9c,10a,10b,null,11a,null]
removeHashIndex = removeHashIndex & mask;
int j = removeHashIndex;
while(true) {
int k;
// skip elements which belong where they are
do {
// j := (j+1) modulo num_slots
j = (j + 1) & mask;
// if slot[j] is unoccupied exit
if (buffer[j] == null) {
// delete last duplicate slot
buffer[removeHashIndex] = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// k := hash(slot[j].key) modulo num_slots
k = System.identityHashCode(buffer[j]) & mask;
// determine if k lies cyclically in [i,j]
// | i.k.j |
// |....j i.k.| or |.k..j i...|
}
while ( (removeHashIndex <= j) ?
((removeHashIndex < k) && (k <= j)) :
((removeHashIndex < k) || (k <= j)) );
// slot[removeHashIndex] := slot[j]
buffer[removeHashIndex] = buffer[j]; // depends on control dependency: [while], data = [none]
// removeHashIndex := j
removeHashIndex = j; // depends on control dependency: [while], data = [none]
}
} }
|
public class class_name {
@SuppressWarnings("null")
@Override
public byte[] toByteArray() {
byte[][] summariesBytes = null;
int summariesBytesLength = 0;
if (count_ > 0) {
summariesBytes = new byte[count_][];
int i = 0;
for (int j = 0; j < summaries_.length; j++) {
if (summaries_[j] != null) {
summariesBytes[i] = summaries_[j].toByteArray();
summariesBytesLength += summariesBytes[i].length;
i++;
}
}
}
int sizeBytes =
Byte.BYTES // preamble longs
+ Byte.BYTES // serial version
+ Byte.BYTES // family
+ Byte.BYTES // sketch type
+ Byte.BYTES // flags
+ Byte.BYTES // log2(nomEntries)
+ Byte.BYTES // log2(currentCapacity)
+ Byte.BYTES; // log2(resizeFactor)
if (isInSamplingMode()) {
sizeBytes += Float.BYTES; // samplingProbability
}
final boolean isThetaIncluded = isInSamplingMode()
? theta_ < samplingProbability_ : theta_ < Long.MAX_VALUE;
if (isThetaIncluded) {
sizeBytes += Long.BYTES;
}
if (count_ > 0) {
sizeBytes += Integer.BYTES; // count
}
sizeBytes += (Long.BYTES * count_) + summariesBytesLength;
final byte[] bytes = new byte[sizeBytes];
int offset = 0;
bytes[offset++] = PREAMBLE_LONGS;
bytes[offset++] = serialVersionUID;
bytes[offset++] = (byte) Family.TUPLE.getID();
bytes[offset++] = (byte) SerializerDeserializer.SketchType.QuickSelectSketch.ordinal();
final boolean isBigEndian = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);
bytes[offset++] = (byte) (
(isBigEndian ? 1 << Flags.IS_BIG_ENDIAN.ordinal() : 0)
| (isInSamplingMode() ? 1 << Flags.IS_IN_SAMPLING_MODE.ordinal() : 0)
| (isEmpty_ ? 1 << Flags.IS_EMPTY.ordinal() : 0)
| (count_ > 0 ? 1 << Flags.HAS_ENTRIES.ordinal() : 0)
| (isThetaIncluded ? 1 << Flags.IS_THETA_INCLUDED.ordinal() : 0)
);
bytes[offset++] = (byte) Integer.numberOfTrailingZeros(nomEntries_);
bytes[offset++] = (byte) lgCurrentCapacity_;
bytes[offset++] = (byte) lgResizeFactor_;
if (samplingProbability_ < 1f) {
ByteArrayUtil.putFloatLE(bytes, offset, samplingProbability_);
offset += Float.BYTES;
}
if (isThetaIncluded) {
ByteArrayUtil.putLongLE(bytes, offset, theta_);
offset += Long.BYTES;
}
if (count_ > 0) {
ByteArrayUtil.putIntLE(bytes, offset, count_);
offset += Integer.BYTES;
}
if (count_ > 0) {
int i = 0;
for (int j = 0; j < keys_.length; j++) {
if (summaries_[j] != null) {
ByteArrayUtil.putLongLE(bytes, offset, keys_[j]);
offset += Long.BYTES;
System.arraycopy(summariesBytes[i], 0, bytes, offset, summariesBytes[i].length);
offset += summariesBytes[i].length;
i++;
}
}
}
return bytes;
} }
|
public class class_name {
@SuppressWarnings("null")
@Override
public byte[] toByteArray() {
byte[][] summariesBytes = null;
int summariesBytesLength = 0;
if (count_ > 0) {
summariesBytes = new byte[count_][]; // depends on control dependency: [if], data = [none]
int i = 0;
for (int j = 0; j < summaries_.length; j++) {
if (summaries_[j] != null) {
summariesBytes[i] = summaries_[j].toByteArray(); // depends on control dependency: [if], data = [none]
summariesBytesLength += summariesBytes[i].length; // depends on control dependency: [if], data = [none]
i++; // depends on control dependency: [if], data = [none]
}
}
}
int sizeBytes =
Byte.BYTES // preamble longs
+ Byte.BYTES // serial version
+ Byte.BYTES // family
+ Byte.BYTES // sketch type
+ Byte.BYTES // flags
+ Byte.BYTES // log2(nomEntries)
+ Byte.BYTES // log2(currentCapacity)
+ Byte.BYTES; // log2(resizeFactor)
if (isInSamplingMode()) {
sizeBytes += Float.BYTES; // samplingProbability // depends on control dependency: [if], data = [none]
}
final boolean isThetaIncluded = isInSamplingMode()
? theta_ < samplingProbability_ : theta_ < Long.MAX_VALUE;
if (isThetaIncluded) {
sizeBytes += Long.BYTES; // depends on control dependency: [if], data = [none]
}
if (count_ > 0) {
sizeBytes += Integer.BYTES; // count // depends on control dependency: [if], data = [none]
}
sizeBytes += (Long.BYTES * count_) + summariesBytesLength;
final byte[] bytes = new byte[sizeBytes];
int offset = 0;
bytes[offset++] = PREAMBLE_LONGS;
bytes[offset++] = serialVersionUID;
bytes[offset++] = (byte) Family.TUPLE.getID();
bytes[offset++] = (byte) SerializerDeserializer.SketchType.QuickSelectSketch.ordinal();
final boolean isBigEndian = ByteOrder.nativeOrder().equals(ByteOrder.BIG_ENDIAN);
bytes[offset++] = (byte) (
(isBigEndian ? 1 << Flags.IS_BIG_ENDIAN.ordinal() : 0)
| (isInSamplingMode() ? 1 << Flags.IS_IN_SAMPLING_MODE.ordinal() : 0)
| (isEmpty_ ? 1 << Flags.IS_EMPTY.ordinal() : 0)
| (count_ > 0 ? 1 << Flags.HAS_ENTRIES.ordinal() : 0)
| (isThetaIncluded ? 1 << Flags.IS_THETA_INCLUDED.ordinal() : 0)
);
bytes[offset++] = (byte) Integer.numberOfTrailingZeros(nomEntries_);
bytes[offset++] = (byte) lgCurrentCapacity_;
bytes[offset++] = (byte) lgResizeFactor_;
if (samplingProbability_ < 1f) {
ByteArrayUtil.putFloatLE(bytes, offset, samplingProbability_); // depends on control dependency: [if], data = [none]
offset += Float.BYTES; // depends on control dependency: [if], data = [none]
}
if (isThetaIncluded) {
ByteArrayUtil.putLongLE(bytes, offset, theta_); // depends on control dependency: [if], data = [none]
offset += Long.BYTES; // depends on control dependency: [if], data = [none]
}
if (count_ > 0) {
ByteArrayUtil.putIntLE(bytes, offset, count_); // depends on control dependency: [if], data = [none]
offset += Integer.BYTES; // depends on control dependency: [if], data = [none]
}
if (count_ > 0) {
int i = 0;
for (int j = 0; j < keys_.length; j++) {
if (summaries_[j] != null) {
ByteArrayUtil.putLongLE(bytes, offset, keys_[j]); // depends on control dependency: [if], data = [none]
offset += Long.BYTES; // depends on control dependency: [if], data = [none]
System.arraycopy(summariesBytes[i], 0, bytes, offset, summariesBytes[i].length); // depends on control dependency: [if], data = [none]
offset += summariesBytes[i].length; // depends on control dependency: [if], data = [none]
i++; // depends on control dependency: [if], data = [none]
}
}
}
return bytes;
} }
|
public class class_name {
public static Map<Object, Object> getSupportedProperties(Object ref) {
if (null == supportedMIMETypes) {
synchronized (MIMETypesSupport.class) {
if (null == supportedMIMETypes) {
// Load the supported MIME types out of a properties file
try (InputStream is = ClassLoaderResourceUtils.getResourceAsStream(MIME_PROPS_LOCATION, ref)) {
supportedMIMETypes = new Properties();
supportedMIMETypes.load(is);
} catch (FileNotFoundException e) {
throw new BundlingProcessException(
"Error retrieving " + MIME_PROPS_LOCATION + ". Please check your classloader settings");
} catch (IOException e) {
throw new BundlingProcessException(
"Error retrieving " + MIME_PROPS_LOCATION + ". Please check your classloader settings");
}
}
}
}
return supportedMIMETypes;
} }
|
public class class_name {
public static Map<Object, Object> getSupportedProperties(Object ref) {
if (null == supportedMIMETypes) {
synchronized (MIMETypesSupport.class) { // depends on control dependency: [if], data = [none]
if (null == supportedMIMETypes) {
// Load the supported MIME types out of a properties file
try (InputStream is = ClassLoaderResourceUtils.getResourceAsStream(MIME_PROPS_LOCATION, ref)) {
supportedMIMETypes = new Properties();
supportedMIMETypes.load(is); // depends on control dependency: [if], data = [none]
} catch (FileNotFoundException e) {
throw new BundlingProcessException(
"Error retrieving " + MIME_PROPS_LOCATION + ". Please check your classloader settings");
} catch (IOException e) {
throw new BundlingProcessException(
"Error retrieving " + MIME_PROPS_LOCATION + ". Please check your classloader settings");
}
}
}
}
return supportedMIMETypes;
} }
|
public class class_name {
protected Method analyse_method_state(String cl_name,String state_method) throws DevFailed
{
Method s_meth = null;
try
{
//
// Get the class object for the device class
//
StringBuffer str = new StringBuffer(cl_name);
str.append(".");
str.append(cl_name);
Class cl = Class.forName(str.toString());
//
// Get the device object method list
//
Method[] meth_list = cl.getDeclaredMethods();
if (meth_list.length == 0)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Can't find method ");
mess.append(state_method);
Except.throw_exception("API_MethodNotFound",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Find the state method in method list
//
s_meth = find_method(meth_list,state_method);
//
// Check if it is public
//
if (Modifier.isPublic(s_meth.getModifiers()) != true)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": The method ");
mess.append(state_method);
mess.append(" is not public");
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Check its argument
//
Class[] s_param_type = s_meth.getParameterTypes();
if (s_param_type.length != 1)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Wrong number of argument defined for method ");
mess.append(state_method);
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
Class any_class = Class.forName("org.omg.CORBA.Any");
if (s_param_type[0].equals(any_class) != true)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Incorrect argument type for method ");
mess.append(state_method);
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Check method return type
//
Class s_ret_type = s_meth.getReturnType();
if (s_ret_type.equals(Boolean.TYPE) != true)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Bad return type for method ");
mess.append(state_method);
mess.append(". Should be boolean");
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
}
catch (ClassNotFoundException ex)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Can't find class ");
mess.append(cl_name);
Except.throw_exception("API_ClassNotFound",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
catch (SecurityException ex)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Security violation when trying to retrieve method list for class ");
mess.append(cl_name);
Except.throw_exception("API_JavaRuntimeSecurityException",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Return the method reference
//
return s_meth;
} }
|
public class class_name {
protected Method analyse_method_state(String cl_name,String state_method) throws DevFailed
{
Method s_meth = null;
try
{
//
// Get the class object for the device class
//
StringBuffer str = new StringBuffer(cl_name);
str.append(".");
str.append(cl_name);
Class cl = Class.forName(str.toString());
//
// Get the device object method list
//
Method[] meth_list = cl.getDeclaredMethods();
if (meth_list.length == 0)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name); // depends on control dependency: [if], data = [none]
mess.append(": Can't find method ");
mess.append(state_method);
Except.throw_exception("API_MethodNotFound",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Find the state method in method list
//
s_meth = find_method(meth_list,state_method);
//
// Check if it is public
//
if (Modifier.isPublic(s_meth.getModifiers()) != true)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": The method ");
mess.append(state_method);
mess.append(" is not public");
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Check its argument
//
Class[] s_param_type = s_meth.getParameterTypes();
if (s_param_type.length != 1)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Wrong number of argument defined for method ");
mess.append(state_method);
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
Class any_class = Class.forName("org.omg.CORBA.Any");
if (s_param_type[0].equals(any_class) != true)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Incorrect argument type for method ");
mess.append(state_method);
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Check method return type
//
Class s_ret_type = s_meth.getReturnType();
if (s_ret_type.equals(Boolean.TYPE) != true)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Bad return type for method ");
mess.append(state_method);
mess.append(". Should be boolean");
Except.throw_exception("API_MethodArgument",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
}
catch (ClassNotFoundException ex)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Can't find class "); // depends on control dependency: [if], data = [none]
mess.append(cl_name); // depends on control dependency: [if], data = [none]
Except.throw_exception("API_ClassNotFound",
mess.toString(),
"TemplCommand.analyse_method_state()"); // depends on control dependency: [if], data = [none]
}
catch (SecurityException ex)
{
StringBuffer mess = new StringBuffer("Command ");
mess.append(name);
mess.append(": Security violation when trying to retrieve method list for class ");
mess.append(cl_name);
Except.throw_exception("API_JavaRuntimeSecurityException",
mess.toString(),
"TemplCommand.analyse_method_state()");
}
//
// Return the method reference
//
return s_meth;
} }
|
public class class_name {
public static File[] wrap(File[] pPaths) {
if (IS_WINDOWS) {
for (int i = 0; pPaths != null && i < pPaths.length; i++) {
pPaths[i] = wrap(pPaths[i]);
}
}
return pPaths;
} }
|
public class class_name {
public static File[] wrap(File[] pPaths) {
if (IS_WINDOWS) {
for (int i = 0; pPaths != null && i < pPaths.length; i++) {
pPaths[i] = wrap(pPaths[i]);
// depends on control dependency: [for], data = [i]
}
}
return pPaths;
} }
|
public class class_name {
@JustForTest
synchronized static void removeValue(String key) {
Object oldValue = CFG.get(key);
if (oldValue != null) {
CFG.remove(key);
List<RpcConfigListener> rpcConfigListeners = CFG_LISTENER.get(key);
if (CommonUtils.isNotEmpty(rpcConfigListeners)) {
for (RpcConfigListener rpcConfigListener : rpcConfigListeners) {
rpcConfigListener.onChange(oldValue, null);
}
}
}
} }
|
public class class_name {
@JustForTest
synchronized static void removeValue(String key) {
Object oldValue = CFG.get(key);
if (oldValue != null) {
CFG.remove(key); // depends on control dependency: [if], data = [none]
List<RpcConfigListener> rpcConfigListeners = CFG_LISTENER.get(key);
if (CommonUtils.isNotEmpty(rpcConfigListeners)) {
for (RpcConfigListener rpcConfigListener : rpcConfigListeners) {
rpcConfigListener.onChange(oldValue, null); // depends on control dependency: [for], data = [rpcConfigListener]
}
}
}
} }
|
public class class_name {
public RestRequestInformation asRestRequestInformation() {
try {
return new RestRequestInformationImpl(
api, new URL(endpoint.getFullUrl(urlParameters)), queryParameters, headers, body);
} catch (MalformedURLException e) {
throw new AssertionError(e);
}
} }
|
public class class_name {
public RestRequestInformation asRestRequestInformation() {
try {
return new RestRequestInformationImpl(
api, new URL(endpoint.getFullUrl(urlParameters)), queryParameters, headers, body); // depends on control dependency: [try], data = [none]
} catch (MalformedURLException e) {
throw new AssertionError(e);
} // depends on control dependency: [catch], data = [none]
} }
|
public class class_name {
    /**
     * Computes the 128-bit MurmurHash3 (x64_128 variant) over {@code len}
     * bytes of {@code key} starting at {@code offset}.
     *
     * @param key    source bytes; only [offset, offset + len) is read
     * @param offset index of the first byte to hash
     * @param len    number of bytes to hash
     * @param seed   seed value, treated as a 32-bit unsigned quantity
     * @return the two 64-bit halves of the hash, wrapped in a HashValue
     */
    public static HashValue murmurhash3_x64_128(final byte[] key, final int offset, final int len, final int seed) {
        // The original algorithm does have a 32 bit unsigned seed.
        // We have to mask to match the behavior of the unsigned types and prevent sign extension.
        long h1 = seed & 0x00000000FFFFFFFFL;
        long h2 = seed & 0x00000000FFFFFFFFL;
        // Mixing constants from the reference implementation.
        final long c1 = 0x87c37b91114253d5L;
        final long c2 = 0x4cf5ad432745937fL;
        int roundedEnd = offset + (len & 0xFFFFFFF0); // round down to 16 byte block
        // Body: consume two little-endian 64-bit lanes (16 bytes) per round.
        for (int i = offset; i < roundedEnd; i += 16) {
            long k1 = getLongLittleEndian(key, i);
            long k2 = getLongLittleEndian(key, i + 8);
            k1 *= c1;
            k1 = Long.rotateLeft(k1, 31);
            k1 *= c2;
            h1 ^= k1;
            h1 = Long.rotateLeft(h1, 27);
            h1 += h2;
            h1 = h1 * 5 + 0x52dce729;
            k2 *= c2;
            k2 = Long.rotateLeft(k2, 33);
            k2 *= c1;
            h2 ^= k2;
            h2 = Long.rotateLeft(h2, 31);
            h2 += h1;
            h2 = h2 * 5 + 0x38495ab5;
        }
        // Tail: fold in the remaining 0-15 bytes. The case labels fall
        // through deliberately, accumulating the tail bytes high-to-low.
        long k1 = 0;
        long k2 = 0;
        switch (len & 15) {
            case 15:
                k2 = (key[roundedEnd + 14] & 0xffL) << 48;
            case 14:
                k2 |= (key[roundedEnd + 13] & 0xffL) << 40;
            case 13:
                k2 |= (key[roundedEnd + 12] & 0xffL) << 32;
            case 12:
                k2 |= (key[roundedEnd + 11] & 0xffL) << 24;
            case 11:
                k2 |= (key[roundedEnd + 10] & 0xffL) << 16;
            case 10:
                k2 |= (key[roundedEnd + 9] & 0xffL) << 8;
            case 9:
                k2 |= (key[roundedEnd + 8] & 0xffL);
                k2 *= c2;
                k2 = Long.rotateLeft(k2, 33);
                k2 *= c1;
                h2 ^= k2;
            case 8:
                k1 = ((long) key[roundedEnd + 7]) << 56;
            case 7:
                k1 |= (key[roundedEnd + 6] & 0xffL) << 48;
            case 6:
                k1 |= (key[roundedEnd + 5] & 0xffL) << 40;
            case 5:
                k1 |= (key[roundedEnd + 4] & 0xffL) << 32;
            case 4:
                k1 |= (key[roundedEnd + 3] & 0xffL) << 24;
            case 3:
                k1 |= (key[roundedEnd + 2] & 0xffL) << 16;
            case 2:
                k1 |= (key[roundedEnd + 1] & 0xffL) << 8;
            case 1:
                k1 |= (key[roundedEnd] & 0xffL);
                k1 *= c1;
                k1 = Long.rotateLeft(k1, 31);
                k1 *= c2;
                h1 ^= k1;
        }
        //----------
        // finalization
        h1 ^= len;
        h2 ^= len;
        h1 += h2;
        h2 += h1;
        // fmix64 applies the standard 64-bit finalization (avalanche) mix.
        h1 = fmix64(h1);
        h2 = fmix64(h2);
        h1 += h2;
        h2 += h1;
        return new HashValue(h1, h2);
    } }
|
public class class_name {
    // NOTE(review): variant of murmurhash3_x64_128 carrying machine-generated
    // program-dependence labels ("// depends on control dependency: ...");
    // those annotation comments are dataset content and are kept verbatim.
    /** 128-bit MurmurHash3 (x64_128) of len bytes of key starting at offset. */
    public static HashValue murmurhash3_x64_128(final byte[] key, final int offset, final int len, final int seed) {
        // The original algorithm does have a 32 bit unsigned seed.
        // We have to mask to match the behavior of the unsigned types and prevent sign extension.
        long h1 = seed & 0x00000000FFFFFFFFL;
        long h2 = seed & 0x00000000FFFFFFFFL;
        final long c1 = 0x87c37b91114253d5L;
        final long c2 = 0x4cf5ad432745937fL;
        int roundedEnd = offset + (len & 0xFFFFFFF0); // round down to 16 byte block
        for (int i = offset; i < roundedEnd; i += 16) {
            long k1 = getLongLittleEndian(key, i);
            long k2 = getLongLittleEndian(key, i + 8);
            k1 *= c1; // depends on control dependency: [for], data = [none]
            k1 = Long.rotateLeft(k1, 31); // depends on control dependency: [for], data = [none]
            k1 *= c2; // depends on control dependency: [for], data = [none]
            h1 ^= k1; // depends on control dependency: [for], data = [none]
            h1 = Long.rotateLeft(h1, 27); // depends on control dependency: [for], data = [none]
            h1 += h2; // depends on control dependency: [for], data = [none]
            h1 = h1 * 5 + 0x52dce729; // depends on control dependency: [for], data = [none]
            k2 *= c2; // depends on control dependency: [for], data = [none]
            k2 = Long.rotateLeft(k2, 33); // depends on control dependency: [for], data = [none]
            k2 *= c1; // depends on control dependency: [for], data = [none]
            h2 ^= k2; // depends on control dependency: [for], data = [none]
            h2 = Long.rotateLeft(h2, 31); // depends on control dependency: [for], data = [none]
            h2 += h1; // depends on control dependency: [for], data = [none]
            h2 = h2 * 5 + 0x38495ab5; // depends on control dependency: [for], data = [none]
        }
        // Tail handling: intentional switch fall-through over the 0-15 byte remainder.
        long k1 = 0;
        long k2 = 0;
        switch (len & 15) {
            case 15:
                k2 = (key[roundedEnd + 14] & 0xffL) << 48;
            case 14:
                k2 |= (key[roundedEnd + 13] & 0xffL) << 40;
            case 13:
                k2 |= (key[roundedEnd + 12] & 0xffL) << 32;
            case 12:
                k2 |= (key[roundedEnd + 11] & 0xffL) << 24;
            case 11:
                k2 |= (key[roundedEnd + 10] & 0xffL) << 16;
            case 10:
                k2 |= (key[roundedEnd + 9] & 0xffL) << 8;
            case 9:
                k2 |= (key[roundedEnd + 8] & 0xffL);
                k2 *= c2;
                k2 = Long.rotateLeft(k2, 33);
                k2 *= c1;
                h2 ^= k2;
            case 8:
                k1 = ((long) key[roundedEnd + 7]) << 56;
            case 7:
                k1 |= (key[roundedEnd + 6] & 0xffL) << 48;
            case 6:
                k1 |= (key[roundedEnd + 5] & 0xffL) << 40;
            case 5:
                k1 |= (key[roundedEnd + 4] & 0xffL) << 32;
            case 4:
                k1 |= (key[roundedEnd + 3] & 0xffL) << 24;
            case 3:
                k1 |= (key[roundedEnd + 2] & 0xffL) << 16;
            case 2:
                k1 |= (key[roundedEnd + 1] & 0xffL) << 8;
            case 1:
                k1 |= (key[roundedEnd] & 0xffL);
                k1 *= c1;
                k1 = Long.rotateLeft(k1, 31);
                k1 *= c2;
                h1 ^= k1;
        }
        //----------
        // finalization
        h1 ^= len;
        h2 ^= len;
        h1 += h2;
        h2 += h1;
        h1 = fmix64(h1);
        h2 = fmix64(h2);
        h1 += h2;
        h2 += h1;
        return new HashValue(h1, h2);
    } }
|
public class class_name {
    /**
     * Determines the driver version from the jar manifest, falling back to
     * "dev" when no manifest version is available (i.e. when running from raw
     * compiled class files during development).
     */
    private static String driverVersion()
    {
        // "Session" is arbitrary - any class works as long as it lives in the
        // 'org.neo4j.driver' package, because that is where the jar manifest
        // (a MANIFEST.MF file added to the jar during the build) specifies
        // the version.
        final Package pkg = Session.class.getPackage();
        final String version = (pkg == null) ? null : pkg.getImplementationVersion();
        // No implementation version means we are not running from a jar file;
        // that should only happen during development.
        return (version != null) ? version : "dev";
    } }
|
public class class_name {
    // NOTE(review): variant of driverVersion carrying a machine-generated
    // dependence label ("// depends on control dependency: ..."); the
    // annotation comment is dataset content and is kept verbatim.
    private static String driverVersion()
    {
        // "Session" is arbitrary - the only thing that matters is that the class we use here is in the
        // 'org.neo4j.driver' package, because that is where the jar manifest specifies the version.
        // This is done as part of the build, adding a MANIFEST.MF file to the generated jarfile.
        Package pkg = Session.class.getPackage();
        if ( pkg != null && pkg.getImplementationVersion() != null )
        {
            return pkg.getImplementationVersion(); // depends on control dependency: [if], data = [none]
        }
        // If there is no version, we're not running from a jar file, but from raw compiled class files.
        // This should only happen during development, so call the version 'dev'.
        return "dev";
    } }
|
public class class_name {
    /**
     * Lazily builds and/or populates the INFORMATION_SCHEMA
     * ADMINISTRABLE_ROLE_AUTHORIZATIONS system table. On the first call
     * (cached entry is null) only the table structure is created and the
     * empty table returned; on later calls the rows for the current session
     * are inserted when that session has admin rights.
     */
    Table ADMINISTRABLE_ROLE_AUTHORIZATIONS() {
        Table t = sysTables[ADMINISTRABLE_ROLE_AUTHORIZATIONS];
        if (t == null) {
            // First call: define the three identifier columns and a primary
            // key covering all of them, then return the structure only.
            t = createBlankTable(
                sysTableHsqlNames[ADMINISTRABLE_ROLE_AUTHORIZATIONS]);
            addColumn(t, "GRANTEE", SQL_IDENTIFIER);
            addColumn(t, "ROLE_NAME", SQL_IDENTIFIER);
            addColumn(t, "IS_GRANTABLE", SQL_IDENTIFIER);
            HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
                sysTableHsqlNames[ADMINISTRABLE_ROLE_AUTHORIZATIONS].name,
                false, SchemaObject.INDEX);
            // Primary key spans columns 0-2 (GRANTEE, ROLE_NAME, IS_GRANTABLE).
            t.createPrimaryKey(name, new int[] {
                0, 1, 2
            }, false);
            return t;
        }
        // Subsequent calls: only admin sessions get role rows inserted.
        if (session.isAdmin()) {
            insertRoles(t, session.getGrantee(), true);
        }
        return t;
    } }
|
public class class_name {
    // NOTE(review): variant of ADMINISTRABLE_ROLE_AUTHORIZATIONS carrying
    // machine-generated dependence labels ("// depends on control dependency:
    // ..."); the annotation comments are dataset content, kept verbatim.
    Table ADMINISTRABLE_ROLE_AUTHORIZATIONS() {
        Table t = sysTables[ADMINISTRABLE_ROLE_AUTHORIZATIONS];
        if (t == null) {
            t = createBlankTable(
                sysTableHsqlNames[ADMINISTRABLE_ROLE_AUTHORIZATIONS]); // depends on control dependency: [if], data = [none]
            addColumn(t, "GRANTEE", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
            addColumn(t, "ROLE_NAME", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
            addColumn(t, "IS_GRANTABLE", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
            HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
                sysTableHsqlNames[ADMINISTRABLE_ROLE_AUTHORIZATIONS].name,
                false, SchemaObject.INDEX);
            t.createPrimaryKey(name, new int[] {
                0, 1, 2
            }, false); // depends on control dependency: [if], data = [none]
            return t; // depends on control dependency: [if], data = [none]
        }
        if (session.isAdmin()) {
            insertRoles(t, session.getGrantee(), true); // depends on control dependency: [if], data = [none]
        }
        return t;
    } }
|
public class class_name {
    /**
     * Returns the request URI with any URL-rewriting session id stripped.
     * The stripped URI is computed once and cached on the per-thread
     * SRTServletRequestThreadData; may return null when the encoded
     * request URI is unavailable.
     */
    public String getRequestURI()
    {
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
            // Optional diagnostic: detect reuse of this request object.
            checkRequestObjectInUse();
        }
        // Begin PK06988, strip session id of when url rewriting is enabled
        SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
        if (reqData != null && reqData.getRequestURI() == null)
        {
            String aURI = getEncodedRequestURI();
            if (aURI == null)
                return null;
            else
                // Cache the stripped URI for later calls on this thread.
                reqData.setRequestURI(WebGroup.stripURL(aURI));
        }
        // 321485
        String uri = null;
        if (reqData != null)
            uri = reqData.getRequestURI();
        if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"getRequestURI", " uri --> " + uri);
        }
        return uri;
        // End PK06988, strip session id of when url rewriting is enabled
    } }
|
public class class_name {
    // NOTE(review): variant of getRequestURI carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); the
    // annotation comments are dataset content, kept verbatim.
    public String getRequestURI()
    {
        if (WCCustomProperties.CHECK_REQUEST_OBJECT_IN_USE){
            checkRequestObjectInUse(); // depends on control dependency: [if], data = [none]
        }
        // Begin PK06988, strip session id of when url rewriting is enabled
        SRTServletRequestThreadData reqData = SRTServletRequestThreadData.getInstance();
        if (reqData != null && reqData.getRequestURI() == null)
        {
            String aURI = getEncodedRequestURI();
            if (aURI == null)
                return null;
            else
                reqData.setRequestURI(WebGroup.stripURL(aURI));
        }
        // 321485
        String uri = null;
        if (reqData != null)
            uri = reqData.getRequestURI();
        if (TraceComponent.isAnyTracingEnabled()&&logger.isLoggable (Level.FINE)) { //306998.15
            logger.logp(Level.FINE, CLASS_NAME,"getRequestURI", " uri --> " + uri); // depends on control dependency: [if], data = [none]
        }
        return uri;
        // End PK06988, strip session id of when url rewriting is enabled
    } }
|
public class class_name {
    /**
     * Appends the JVM type signature of {@code type} to {@code sb},
     * following the grammar fragments quoted inline: primitive descriptors,
     * "[" for arrays, "L...;" for declared types, "T...;" for type
     * variables, and "+"/"-"/"*" for wildcard bounds.
     *
     * @param type the type to encode
     * @param sb   destination buffer the signature is appended to
     * @throws AssertionError for type kinds this generator does not handle
     */
    private void genTypeSignature(TypeMirror type, StringBuilder sb) {
        switch (type.getKind()) {
            // All primitive kinds and VOID share one branch: their binary
            // name is already the single-character descriptor.
            case BOOLEAN:
            case BYTE:
            case CHAR:
            case DOUBLE:
            case FLOAT:
            case INT:
            case LONG:
            case SHORT:
            case VOID:
                sb.append(TypeUtil.getBinaryName(type));
                break;
            case ARRAY:
                // ArrayTypeSignature ::= "[" TypSignature.
                sb.append('[');
                genTypeSignature(((ArrayType) type).getComponentType(), sb);
                break;
            case DECLARED:
                String typeName = elementUtil.getBinaryName(TypeUtil.asTypeElement(type));
                // Stub types emit nothing at all — presumably they must not
                // appear in generated signatures; TODO confirm intent.
                if (!TypeUtil.isStubType(typeName)) {
                    // ClassTypeSignature ::= "L" {Ident "/"} Ident
                    // OptTypeArguments {"." Ident OptTypeArguments} ";".
                    sb.append('L');
                    sb.append(typeName.replace('.', '/'));
                    genOptTypeArguments(((DeclaredType) type).getTypeArguments(), sb);
                    sb.append(';');
                }
                break;
            case TYPEVAR:
                // TypeVariableSignature ::= "T" Ident ";".
                sb.append('T');
                sb.append(ElementUtil.getName(((TypeVariable) type).asElement()));
                sb.append(';');
                break;
            case WILDCARD:
                // TypeArgument ::= (["+" | "-"] FieldTypeSignature) | "*".
                TypeMirror upperBound = ((WildcardType) type).getExtendsBound();
                TypeMirror lowerBound = ((WildcardType) type).getSuperBound();
                // extends-bound wins over super-bound; unbounded wildcard is "*".
                if (upperBound != null) {
                    sb.append('+');
                    genTypeSignature(upperBound, sb);
                } else if (lowerBound != null) {
                    sb.append('-');
                    genTypeSignature(lowerBound, sb);
                } else {
                    sb.append('*');
                }
                break;
            default:
                throw new AssertionError("Unexpected type kind: " + type.getKind());
        }
    } }
|
public class class_name {
    // NOTE(review): variant of genTypeSignature carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); the
    // annotation comments are dataset content, kept verbatim.
    private void genTypeSignature(TypeMirror type, StringBuilder sb) {
        switch (type.getKind()) {
            case BOOLEAN:
            case BYTE:
            case CHAR:
            case DOUBLE:
            case FLOAT:
            case INT:
            case LONG:
            case SHORT:
            case VOID:
                sb.append(TypeUtil.getBinaryName(type));
                break;
            case ARRAY:
                // ArrayTypeSignature ::= "[" TypSignature.
                sb.append('[');
                genTypeSignature(((ArrayType) type).getComponentType(), sb);
                break;
            case DECLARED:
                String typeName = elementUtil.getBinaryName(TypeUtil.asTypeElement(type));
                if (!TypeUtil.isStubType(typeName)) {
                    // ClassTypeSignature ::= "L" {Ident "/"} Ident
                    // OptTypeArguments {"." Ident OptTypeArguments} ";".
                    sb.append('L'); // depends on control dependency: [if], data = [none]
                    sb.append(typeName.replace('.', '/')); // depends on control dependency: [if], data = [none]
                    genOptTypeArguments(((DeclaredType) type).getTypeArguments(), sb); // depends on control dependency: [if], data = [none]
                    sb.append(';'); // depends on control dependency: [if], data = [none]
                }
                break;
            case TYPEVAR:
                // TypeVariableSignature ::= "T" Ident ";".
                sb.append('T');
                sb.append(ElementUtil.getName(((TypeVariable) type).asElement()));
                sb.append(';');
                break;
            case WILDCARD:
                // TypeArgument ::= (["+" | "-"] FieldTypeSignature) | "*".
                TypeMirror upperBound = ((WildcardType) type).getExtendsBound();
                TypeMirror lowerBound = ((WildcardType) type).getSuperBound();
                if (upperBound != null) {
                    sb.append('+'); // depends on control dependency: [if], data = [none]
                    genTypeSignature(upperBound, sb); // depends on control dependency: [if], data = [(upperBound]
                } else if (lowerBound != null) {
                    sb.append('-'); // depends on control dependency: [if], data = [none]
                    genTypeSignature(lowerBound, sb); // depends on control dependency: [if], data = [(lowerBound]
                } else {
                    sb.append('*'); // depends on control dependency: [if], data = [none]
                }
                break;
            default:
                throw new AssertionError("Unexpected type kind: " + type.getKind());
        }
    } }
|
public class class_name {
    /**
     * Processes a userinfo request by delegating to a freshly created
     * UserInfoRequestHandler. A WebApplicationException raised by the
     * handler is reported via onError and converted into the HTTP response
     * it carries.
     */
    public Response handle(
        AuthleteApi api, UserInfoRequestHandlerSpi spi, String accessToken)
    {
        try
        {
            // Build the handler and let it perform the actual work.
            return new UserInfoRequestHandler(api, spi).handle(accessToken);
        }
        catch (WebApplicationException e)
        {
            // Report the failure, then answer with the response embedded
            // in the exception.
            onError(e);
            return e.getResponse();
        }
    } }
|
public class class_name {
    // NOTE(review): variant of handle carrying machine-generated dependence
    // labels ("// depends on control dependency: ..."); kept verbatim.
    public Response handle(
        AuthleteApi api, UserInfoRequestHandlerSpi spi, String accessToken)
    {
        try
        {
            // Create a handler.
            UserInfoRequestHandler handler = new UserInfoRequestHandler(api, spi);
            // Delegate the task to the handler.
            return handler.handle(accessToken); // depends on control dependency: [try], data = [none]
        }
        catch (WebApplicationException e)
        {
            // An error occurred in the handler.
            onError(e);
            // Convert the error to a Response.
            return e.getResponse();
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Lazily builds the dialog's main panel: a scroll pane spanning the top,
     * a filler label on the bottom-left, and Cancel/OK buttons on the
     * bottom-right, all laid out with GridBagLayout. Subsequent calls return
     * the cached instance.
     */
    private JPanel getJPanel() {
        if (jPanel == null) {
            GridBagConstraints gridBagConstraints15 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints13 = new GridBagConstraints();
            javax.swing.JLabel jLabel2 = new JLabel();
            java.awt.GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
            jPanel = new JPanel();
            jPanel.setLayout(new GridBagLayout());
            jPanel.setPreferredSize(new java.awt.Dimension(400,400));
            jPanel.setMinimumSize(new java.awt.Dimension(400,400));
            // Cancel button: row 5, column 1, right-aligned.
            gridBagConstraints2.gridx = 1;
            gridBagConstraints2.gridy = 5;
            gridBagConstraints2.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints2.anchor = java.awt.GridBagConstraints.EAST;
            // OK button: row 5, column 2, right-aligned with a wider right inset.
            gridBagConstraints3.gridx = 2;
            gridBagConstraints3.gridy = 5;
            gridBagConstraints3.insets = new java.awt.Insets(2,2,2,10);
            gridBagConstraints3.anchor = java.awt.GridBagConstraints.EAST;
            // Filler label: row 5, column 0, absorbs horizontal slack so the
            // buttons stay pinned to the right edge.
            gridBagConstraints13.gridx = 0;
            gridBagConstraints13.gridy = 5;
            gridBagConstraints13.fill = java.awt.GridBagConstraints.HORIZONTAL;
            gridBagConstraints13.weightx = 1.0D;
            gridBagConstraints13.insets = new java.awt.Insets(2,10,2,5);
            // Scroll pane: row 2, spans all three columns and takes all
            // remaining space in both directions.
            gridBagConstraints15.weightx = 1.0D;
            gridBagConstraints15.weighty = 1.0D;
            gridBagConstraints15.fill = java.awt.GridBagConstraints.BOTH;
            gridBagConstraints15.insets = new java.awt.Insets(2,2,2,2);
            gridBagConstraints15.gridwidth = 3;
            gridBagConstraints15.gridx = 0;
            gridBagConstraints15.gridy = 2;
            gridBagConstraints15.anchor = java.awt.GridBagConstraints.NORTHWEST;
            gridBagConstraints15.ipadx = 0;
            gridBagConstraints15.ipady = 10;
            jPanel.add(getJScrollPane(), gridBagConstraints15);
            jPanel.add(jLabel2, gridBagConstraints13);
            jPanel.add(getBtnCancel(), gridBagConstraints2);
            jPanel.add(getBtnOk(), gridBagConstraints3);
        }
        return jPanel;
    } }
|
public class class_name {
    // NOTE(review): variant of getJPanel carrying machine-generated
    // dependence labels on separate lines ("// depends on control dependency:
    // ..."); the annotation comments are dataset content, kept verbatim.
    private JPanel getJPanel() {
        if (jPanel == null) {
            GridBagConstraints gridBagConstraints15 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints13 = new GridBagConstraints();
            javax.swing.JLabel jLabel2 = new JLabel();
            java.awt.GridBagConstraints gridBagConstraints3 = new GridBagConstraints();
            java.awt.GridBagConstraints gridBagConstraints2 = new GridBagConstraints();
            jPanel = new JPanel();
            // depends on control dependency: [if], data = [none]
            jPanel.setLayout(new GridBagLayout());
            // depends on control dependency: [if], data = [none]
            jPanel.setPreferredSize(new java.awt.Dimension(400,400));
            // depends on control dependency: [if], data = [none]
            jPanel.setMinimumSize(new java.awt.Dimension(400,400));
            // depends on control dependency: [if], data = [none]
            gridBagConstraints2.gridx = 1;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints2.gridy = 5;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints2.insets = new java.awt.Insets(2,2,2,2);
            // depends on control dependency: [if], data = [none]
            gridBagConstraints2.anchor = java.awt.GridBagConstraints.EAST;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints3.gridx = 2;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints3.gridy = 5;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints3.insets = new java.awt.Insets(2,2,2,10);
            // depends on control dependency: [if], data = [none]
            gridBagConstraints3.anchor = java.awt.GridBagConstraints.EAST;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints13.gridx = 0;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints13.gridy = 5;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints13.fill = java.awt.GridBagConstraints.HORIZONTAL;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints13.weightx = 1.0D;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints13.insets = new java.awt.Insets(2,10,2,5);
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.weightx = 1.0D;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.weighty = 1.0D;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.fill = java.awt.GridBagConstraints.BOTH;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.insets = new java.awt.Insets(2,2,2,2);
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.gridwidth = 3;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.gridx = 0;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.gridy = 2;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.anchor = java.awt.GridBagConstraints.NORTHWEST;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.ipadx = 0;
            // depends on control dependency: [if], data = [none]
            gridBagConstraints15.ipady = 10;
            // depends on control dependency: [if], data = [none]
            jPanel.add(getJScrollPane(), gridBagConstraints15);
            // depends on control dependency: [if], data = [none]
            jPanel.add(jLabel2, gridBagConstraints13);
            // depends on control dependency: [if], data = [none]
            jPanel.add(getBtnCancel(), gridBagConstraints2);
            // depends on control dependency: [if], data = [none]
            jPanel.add(getBtnOk(), gridBagConstraints3);
            // depends on control dependency: [if], data = [none]
        }
        return jPanel;
    } }
|
public class class_name {
    /**
     * Compacts the backing map by removing every entry whose count has
     * dropped to zero.
     */
    public void compress ()
    {
        // removeIf is the idiomatic equivalent of the explicit
        // iterator-with-remove loop: removing from the values() view
        // removes the corresponding mapping from the backing map.
        _backing.values().removeIf(entry -> entry.count == 0);
    } }
|
public class class_name {
    // NOTE(review): variant of compress carrying a machine-generated
    // dependence label ("// depends on control dependency: ..."); kept verbatim.
    public void compress ()
    {
        // Drop entries whose count has reached zero.
        for (Iterator<CountEntry<K>> it = _backing.values().iterator(); it.hasNext(); ) {
            if (it.next().count == 0) {
                it.remove(); // depends on control dependency: [if], data = [none]
            }
        }
    } }
|
public class class_name {
protected void setCommitDanglingWork(BeanMetaData bmd) {
final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
int unresolvedAction = bmd._localTran.getUnresolvedAction();
if (unresolvedAction == LocalTransactionSettings.UNRESOLVED_COMMIT) {
bmd.commitDanglingWork = true;
}
if (isTraceOn && tc.isDebugEnabled()) {
Tr.debug(tc, "The commitDanglingWork flag is set to " + bmd.commitDanglingWork);
}
} }
|
public class class_name {
    // NOTE(review): variant of setCommitDanglingWork carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    protected void setCommitDanglingWork(BeanMetaData bmd) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        int unresolvedAction = bmd._localTran.getUnresolvedAction();
        if (unresolvedAction == LocalTransactionSettings.UNRESOLVED_COMMIT) {
            bmd.commitDanglingWork = true; // depends on control dependency: [if], data = [none]
        }
        if (isTraceOn && tc.isDebugEnabled()) {
            Tr.debug(tc, "The commitDanglingWork flag is set to " + bmd.commitDanglingWork); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Serializes an RSA public key as the key's modulus and public exponent
     * in hex, joined by SPLIT. Returns null (after logging a warning) if the
     * key cannot be decomposed into an RSAPublicKeySpec.
     */
    public static String RSAKeyToString (PublicKey key)
    {
        try {
            RSAPublicKeySpec spec =
                KeyFactory.getInstance("RSA").getKeySpec(key, RSAPublicKeySpec.class);
            StringBuilder out = new StringBuilder();
            out.append(spec.getModulus().toString(16));
            out.append(SPLIT);
            out.append(spec.getPublicExponent().toString(16));
            return out.toString();
        } catch (GeneralSecurityException gse) {
            log.warning("Failed to convert key to string", gse);
            return null;
        }
    } }
|
public class class_name {
    // NOTE(review): variant of RSAKeyToString carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    public static String RSAKeyToString (PublicKey key)
    {
        try {
            KeyFactory kf = KeyFactory.getInstance("RSA");
            RSAPublicKeySpec spec = kf.getKeySpec(key, RSAPublicKeySpec.class);
            StringBuilder buf = new StringBuilder();
            buf.append(spec.getModulus().toString(16))
                .append(SPLIT)
                .append(spec.getPublicExponent().toString(16)); // depends on control dependency: [try], data = [none]
            return buf.toString(); // depends on control dependency: [try], data = [none]
        } catch (GeneralSecurityException gse) {
            log.warning("Failed to convert key to string", gse);
        } // depends on control dependency: [catch], data = [none]
        return null;
    } }
|
public class class_name {
    /**
     * Traverses a module node: enters its scope, asks the callback whether
     * to descend, and — if so — walks the children before visiting the node
     * itself (post-order visit). The scope is popped regardless.
     */
    private void handleModule(Node n, Node parent) {
        pushScope(n);
        curNode = n;
        if (callback.shouldTraverse(this, n, parent)) {
            // curNode is re-assigned after shouldTraverse — presumably a
            // defensive reset in case the callback moved it; TODO confirm.
            curNode = n;
            traverseChildren(n);
            callback.visit(this, n, parent);
        }
        popScope();
    } }
|
public class class_name {
    // NOTE(review): variant of handleModule carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    private void handleModule(Node n, Node parent) {
        pushScope(n);
        curNode = n;
        if (callback.shouldTraverse(this, n, parent)) {
            curNode = n; // depends on control dependency: [if], data = [none]
            traverseChildren(n); // depends on control dependency: [if], data = [none]
            callback.visit(this, n, parent); // depends on control dependency: [if], data = [none]
        }
        popScope();
    } }
|
public class class_name {
    /**
     * Resolves the EObject referenced at the given offset inside a comment of
     * the resource's parse tree. Locates the leaf node at the offset, derives
     * its semantic object, reference, and scope, then scans the comment's
     * type-reference regions for one containing the offset and looks its text
     * up in the scope. Returns null when any step fails to produce a match.
     */
    @Override
    public EObjectInComment computeEObjectReferencedInComment(XtextResource resource, int offset) {
        IParseResult parseResult = resource.getParseResult();
        if(parseResult != null) {
            INode rootNode = parseResult.getRootNode();
            INode node = NodeModelUtils.findLeafNodeAtOffset(rootNode, offset);
            EObject semanticObject = NodeModelUtils.findActualSemanticObjectFor(node);
            if(semanticObject != null) {
                EReference reference = getEReference(semanticObject, node, offset);
                if(reference != null) {
                    IScope scope = getScope(semanticObject, reference, node, offset);
                    List<ReplaceRegion> eObjectReferences = computeTypeRefRegions(node);
                    for(ReplaceRegion eObjectReference : eObjectReferences) {
                        // Only regions that actually contain the offset are candidates.
                        if(eObjectReference.getOffset() <= offset && offset <= eObjectReference.getEndOffset()) {
                            String eObjectReferenceText = eObjectReference.getText();
                            if(!Strings.isNullOrEmpty(eObjectReferenceText)) {
                                ITextRegion region = new TextRegion(eObjectReference.getOffset(), eObjectReference.getLength());
                                IEObjectDescription candidate = getElementFromScope(scope, node, region, eObjectReferenceText);
                                if(candidate != null) {
                                    EObject eObject = candidate.getEObjectOrProxy();
                                    if(eObject != null) {
                                        // First successful resolution wins.
                                        return new EObjectInComment(eObject, region);
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return null;
    } }
|
public class class_name {
    // NOTE(review): variant of computeEObjectReferencedInComment carrying a
    // machine-generated dependence label ("// depends on control dependency:
    // ..."); kept verbatim.
    @Override
    public EObjectInComment computeEObjectReferencedInComment(XtextResource resource, int offset) {
        IParseResult parseResult = resource.getParseResult();
        if(parseResult != null) {
            INode rootNode = parseResult.getRootNode();
            INode node = NodeModelUtils.findLeafNodeAtOffset(rootNode, offset);
            EObject semanticObject = NodeModelUtils.findActualSemanticObjectFor(node);
            if(semanticObject != null) {
                EReference reference = getEReference(semanticObject, node, offset);
                if(reference != null) {
                    IScope scope = getScope(semanticObject, reference, node, offset);
                    List<ReplaceRegion> eObjectReferences = computeTypeRefRegions(node);
                    for(ReplaceRegion eObjectReference : eObjectReferences) {
                        if(eObjectReference.getOffset() <= offset && offset <= eObjectReference.getEndOffset()) {
                            String eObjectReferenceText = eObjectReference.getText();
                            if(!Strings.isNullOrEmpty(eObjectReferenceText)) {
                                ITextRegion region = new TextRegion(eObjectReference.getOffset(), eObjectReference.getLength());
                                IEObjectDescription candidate = getElementFromScope(scope, node, region, eObjectReferenceText);
                                if(candidate != null) {
                                    EObject eObject = candidate.getEObjectOrProxy();
                                    if(eObject != null) {
                                        return new EObjectInComment(eObject, region); // depends on control dependency: [if], data = [(eObject]
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        return null;
    } }
|
public class class_name {
    /**
     * Fluent varargs setter: appends the given operations to this request's
     * operation list (creating the list on first use) and returns this
     * request for method chaining.
     */
    public BatchReadRequest withOperations(BatchReadOperation... operations) {
        if (this.operations == null) {
            // Lazily create the backing list, sized for the incoming batch.
            setOperations(new java.util.ArrayList<BatchReadOperation>(operations.length));
        }
        // Equivalent to adding each element in a loop.
        java.util.Collections.addAll(this.operations, operations);
        return this;
    } }
|
public class class_name {
    // NOTE(review): variant of withOperations carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    public BatchReadRequest withOperations(BatchReadOperation... operations) {
        if (this.operations == null) {
            setOperations(new java.util.ArrayList<BatchReadOperation>(operations.length)); // depends on control dependency: [if], data = [none]
        }
        for (BatchReadOperation ele : operations) {
            this.operations.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
|
public class class_name {
    /**
     * Renders a document as text: an optional package header, the import
     * lines (followed by one blank line when any are present), then each
     * data type's rendering separated by newlines.
     */
    public static String print(Doc doc) {
        final StringBuilder out = new StringBuilder();
        if (!doc.pkg.name.isEmpty()) {
            out.append("package ").append(doc.pkg.name).append("\n\n");
        }
        if (!doc.imports.isEmpty()) {
            for (Imprt imp : doc.imports) {
                out.append("import ").append(imp.name).append("\n");
            }
            out.append("\n");
        }
        for (DataType dataType : doc.dataTypes) {
            out.append(print(dataType)).append("\n");
        }
        return out.toString();
    } }
|
public class class_name {
    // NOTE(review): variant of print carrying machine-generated dependence
    // labels ("// depends on control dependency: ..."); kept verbatim.
    public static String print(Doc doc) {
        final StringBuilder builder = new StringBuilder(doc.pkg.name.isEmpty() ? "" : ("package " + doc.pkg.name + "\n\n"));
        if (!doc.imports.isEmpty()) {
            for (Imprt imp : doc.imports) {
                builder.append("import " + imp.name + "\n"); // depends on control dependency: [for], data = [imp]
            }
            builder.append("\n"); // depends on control dependency: [if], data = [none]
        }
        for (DataType dataType : doc.dataTypes) {
            builder.append(print(dataType)); // depends on control dependency: [for], data = [dataType]
            builder.append("\n"); // depends on control dependency: [for], data = [none]
        }
        return builder.toString();
    } }
|
public class class_name {
    /**
     * Detaches the given child from this node. Returns true — and clears the
     * child's parent pointer — only when the child was actually present in
     * this node's children list.
     */
    public boolean removeChild( AstNode child ) {
        if (!this.children.remove(child)) {
            return false;
        }
        // Only a node that was really our child loses its parent link.
        child.parent = null;
        return true;
    } }
|
public class class_name {
    // NOTE(review): variant of removeChild carrying a machine-generated
    // dependence label ("// depends on control dependency: ..."); kept verbatim.
    public boolean removeChild( AstNode child ) {
        boolean result = this.children.remove(child);
        if (result) {
            child.parent = null; // depends on control dependency: [if], data = [none]
        }
        return result;
    } }
|
public class class_name {
    /**
     * JMX attribute: per-volume space statistics (usedSpace, freeSpace,
     * reservedSpace) keyed by volume directory, serialized as a JSON string.
     * Returns the literal "ERROR" when the volume list cannot be read.
     */
    @Override // DataNodeMXBean
    public String getVolumeInfo() {
        // Outer map: volume directory path -> inner map of space statistics.
        final Map<String, Object> info = new HashMap<String, Object>();
        try {
            FSVolume[] volumes = ((FSDataset)this.data).volumes.getVolumes();
            for (FSVolume v : volumes) {
                final Map<String, Object> innerInfo = new HashMap<String, Object>();
                innerInfo.put("usedSpace", v.getDfsUsed());
                innerInfo.put("freeSpace", v.getAvailable());
                innerInfo.put("reservedSpace", v.getReserved());
                info.put(v.getDir().toString(), innerInfo);
            }
            return JSON.toString(info);
        } catch (IOException e) {
            LOG.info("Cannot get volume info.", e);
            // Failures are reported in-band to the MXBean client.
            return "ERROR";
        }
    } }
|
public class class_name {
    // NOTE(review): variant of getVolumeInfo carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    @Override // DataNodeMXBean
    public String getVolumeInfo() {
        final Map<String, Object> info = new HashMap<String, Object>();
        try {
            FSVolume[] volumes = ((FSDataset)this.data).volumes.getVolumes();
            for (FSVolume v : volumes) {
                final Map<String, Object> innerInfo = new HashMap<String, Object>();
                innerInfo.put("usedSpace", v.getDfsUsed()); // depends on control dependency: [for], data = [v]
                innerInfo.put("freeSpace", v.getAvailable()); // depends on control dependency: [for], data = [v]
                innerInfo.put("reservedSpace", v.getReserved()); // depends on control dependency: [for], data = [v]
                info.put(v.getDir().toString(), innerInfo); // depends on control dependency: [for], data = [v]
            }
            return JSON.toString(info); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
            LOG.info("Cannot get volume info.", e);
            return "ERROR";
        } // depends on control dependency: [catch], data = [none]
    } }
|
public class class_name {
    /**
     * Tears down the connection: cancels the timer (if any) and, when the
     * websocket is currently open, disconnects it and resets the
     * subscription bookkeeping.
     */
    public void disconnect(){
        if (timer != null) {
            timer.cancel();
        }
        final boolean socketOpen = ws != null && ws.isOpen();
        if (socketOpen) {
            ws.disconnect();
            // Forget all subscriptions so a later connect starts clean.
            subscribedTokens = new HashSet<>();
            modeMap.clear();
        }
    } }
|
public class class_name {
    // NOTE(review): variant of disconnect carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    public void disconnect(){
        if(timer != null){
            timer.cancel(); // depends on control dependency: [if], data = [none]
        }
        if (ws != null && ws.isOpen()) {
            ws.disconnect(); // depends on control dependency: [if], data = [none]
            subscribedTokens = new HashSet<>(); // depends on control dependency: [if], data = [none]
            modeMap.clear(); // depends on control dependency: [if], data = [none]
        }
    } }
|
public class class_name {
    /**
     * Checks that {@code actualVersion} satisfies {@code requiredVersionRange}.
     * An exactly-equal version string short-circuits the range parse;
     * otherwise the range is parsed as a version spec and membership tested.
     *
     * @param log                  sink for debug messages
     * @param variableName         name used in log and error messages
     * @param requiredVersionRange required version or version-range spec
     * @param actualVersion        the version detected at runtime
     * @throws EnforcerRuleException when the range is empty or invalid, or
     *         the actual version is outside the allowed range
     */
    public void enforceVersion( Log log, String variableName, String requiredVersionRange, ArtifactVersion actualVersion )
        throws EnforcerRuleException
    {
        if ( StringUtils.isEmpty( requiredVersionRange ) )
        {
            throw new EnforcerRuleException( variableName + " version can't be empty." );
        }
        else
        {
            VersionRange vr;
            String msg = "Detected " + variableName + " Version: " + actualVersion;
            // short circuit check if the strings are exactly equal
            if ( actualVersion.toString().equals( requiredVersionRange ) )
            {
                log.debug( msg + " is allowed in the range " + requiredVersionRange + "." );
            }
            else
            {
                try
                {
                    vr = VersionRange.createFromVersionSpec( requiredVersionRange );
                    if ( containsVersion( vr, actualVersion ) )
                    {
                        log.debug( msg + " is allowed in the range " + requiredVersionRange + "." );
                    }
                    else
                    {
                        // A rule-configured message (if any) overrides the default.
                        String message = getMessage();
                        if ( StringUtils.isEmpty( message ) )
                        {
                            message = msg + " is not in the allowed range " + vr + ".";
                        }
                        throw new EnforcerRuleException( message );
                    }
                }
                catch ( InvalidVersionSpecificationException e )
                {
                    throw new EnforcerRuleException( "The requested " + variableName + " version "
                        + requiredVersionRange + " is invalid.", e );
                }
            }
        }
    } }
|
public class class_name {
    // NOTE(review): variant of enforceVersion carrying machine-generated
    // dependence labels ("// depends on control dependency: ..."); kept verbatim.
    public void enforceVersion( Log log, String variableName, String requiredVersionRange, ArtifactVersion actualVersion )
        throws EnforcerRuleException
    {
        if ( StringUtils.isEmpty( requiredVersionRange ) )
        {
            throw new EnforcerRuleException( variableName + " version can't be empty." );
        }
        else
        {
            VersionRange vr;
            String msg = "Detected " + variableName + " Version: " + actualVersion;
            // short circuit check if the strings are exactly equal
            if ( actualVersion.toString().equals( requiredVersionRange ) )
            {
                log.debug( msg + " is allowed in the range " + requiredVersionRange + "." ); // depends on control dependency: [if], data = [none]
            }
            else
            {
                try
                {
                    vr = VersionRange.createFromVersionSpec( requiredVersionRange ); // depends on control dependency: [try], data = [none]
                    if ( containsVersion( vr, actualVersion ) )
                    {
                        log.debug( msg + " is allowed in the range " + requiredVersionRange + "." ); // depends on control dependency: [if], data = [none]
                    }
                    else
                    {
                        String message = getMessage();
                        if ( StringUtils.isEmpty( message ) )
                        {
                            message = msg + " is not in the allowed range " + vr + "."; // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
                        }
                        throw new EnforcerRuleException( message );
                    }
                }
                catch ( InvalidVersionSpecificationException e )
                {
                    throw new EnforcerRuleException( "The requested " + variableName + " version "
                        + requiredVersionRange + " is invalid.", e );
                } // depends on control dependency: [catch], data = [none]
            }
        }
    } }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.