code stringlengths 130 281k | code_dependency stringlengths 182 306k |
|---|---|
public class class_name {
    /**
     * Stores a defensive copy of the given entity aggregates.
     * Passing {@code null} clears the field to {@code null}.
     *
     * @param entityAggregates the aggregates to store, or {@code null} to clear
     */
    public void setEntityAggregates(java.util.Collection<EntityAggregate> entityAggregates) {
        this.entityAggregates = (entityAggregates == null)
                ? null
                : new java.util.ArrayList<EntityAggregate>(entityAggregates);
    }
} | public class class_name {
public void setEntityAggregates(java.util.Collection<EntityAggregate> entityAggregates) {
if (entityAggregates == null) {
this.entityAggregates = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.entityAggregates = new java.util.ArrayList<EntityAggregate>(entityAggregates);
} } |
public class class_name {
    /**
     * Validates the {@code number} attributes of the given column mappings.
     * <p>
     * Fails when any number appears more than once, or when the smallest number
     * is less than 1. An empty mapping list yields an empty set without error
     * (the original code threw {@code NoSuchElementException} from
     * {@code TreeSet.first()} in that case).
     *
     * @param beanType the bean class being validated (used in error messages)
     * @param list the column mappings to check
     * @return the sorted set of all (distinct) column numbers seen
     * @throws SuperCsvInvalidAnnotationException on duplicated or non-positive numbers
     */
    public static TreeSet<Integer> validateDuplicatedColumnNumber(final Class<?> beanType, final List<ColumnMapping> list) {
        final TreeSet<Integer> checkedNumber = new TreeSet<>();
        final TreeSet<Integer> duplicatedNumbers = new TreeSet<>();
        for (ColumnMapping columnMapping : list) {
            // Set.add returns false when the number was already present,
            // which marks it as a duplicate.
            if (!checkedNumber.add(columnMapping.getNumber())) {
                duplicatedNumbers.add(columnMapping.getNumber());
            }
        }
        if (!duplicatedNumbers.isEmpty()) {
            // duplicated "number" attribute values exist
            throw new SuperCsvInvalidAnnotationException(MessageBuilder.create("anno.attr.duplicated")
                    .var("property", beanType.getName())
                    .varWithAnno("anno", CsvColumn.class)
                    .var("attrName", "number")
                    .var("attrValues", duplicatedNumbers)
                    .format());
        }
        if (checkedNumber.isEmpty()) {
            // no columns at all: nothing left to validate
            return checkedNumber;
        }
        // check that every column number is 1 or greater
        final int minColumnNumber = checkedNumber.first();
        if (minColumnNumber <= 0) {
            throw new SuperCsvInvalidAnnotationException(MessageBuilder.create("anno.attr.min")
                    .var("property", beanType.getName())
                    .varWithAnno("anno", CsvColumn.class)
                    .var("attrName", "number")
                    .var("attrValue", minColumnNumber)
                    .var("min", 1)
                    .format());
        }
        return checkedNumber;
    }
} | public class class_name {
public static TreeSet<Integer> validateDuplicatedColumnNumber(final Class<?> beanType, final List<ColumnMapping> list) {
final TreeSet<Integer> checkedNumber = new TreeSet<>();
final TreeSet<Integer> duplicatedNumbers = new TreeSet<>();
for(ColumnMapping columnMapping : list) {
if(checkedNumber.contains(columnMapping.getNumber())) {
duplicatedNumbers.add(columnMapping.getNumber());
// depends on control dependency: [if], data = [none]
}
checkedNumber.add(columnMapping.getNumber());
// depends on control dependency: [for], data = [columnMapping]
}
if(!duplicatedNumbers.isEmpty()) {
// 重複している 属性 numberが存在する場合
throw new SuperCsvInvalidAnnotationException(MessageBuilder.create("anno.attr.duplicated")
.var("property", beanType.getName())
.varWithAnno("anno", CsvColumn.class)
.var("attrName", "number")
.var("attrValues", duplicatedNumbers)
.format());
}
// カラム番号が1以上かチェックする
final int minColumnNumber = checkedNumber.first();
if(minColumnNumber <= 0) {
throw new SuperCsvInvalidAnnotationException(MessageBuilder.create("anno.attr.min")
.var("property", beanType.getName())
.varWithAnno("anno", CsvColumn.class)
.var("attrName", "number")
.var("attrValue", minColumnNumber)
.var("min", 1)
.format());
}
return checkedNumber;
} } |
public class class_name {
    /**
     * Marshals the id, ARN and name of the given hierarchy level through the
     * supplied protocol marshaller.
     *
     * @param level the hierarchy level to marshall; must not be null
     * @param marshaller the marshaller receiving the bound values
     * @throws SdkClientException when the argument is null or marshalling fails
     */
    public void marshall(HierarchyLevel level, ProtocolMarshaller marshaller) {
        if (level == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            marshaller.marshall(level.getId(), ID_BINDING);
            marshaller.marshall(level.getArn(), ARN_BINDING);
            marshaller.marshall(level.getName(), NAME_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
} | public class class_name {
public void marshall(HierarchyLevel hierarchyLevel, ProtocolMarshaller protocolMarshaller) {
if (hierarchyLevel == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(hierarchyLevel.getId(), ID_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(hierarchyLevel.getArn(), ARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(hierarchyLevel.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Re-keys the client object mapped under {@code oldname} to {@code newname}.
     *
     * @param oldname the name currently mapped
     * @param newname the name to map the object under
     * @return true when the rename succeeded, false when no object was mapped
     *         under {@code oldname} (a warning is logged in that case)
     */
    protected boolean renameClientObject (Name oldname, Name newname)
    {
        final ClientObject removed = _objmap.remove(oldname);
        if (removed != null) {
            _objmap.put(newname, removed);
            return true;
        }
        log.warning("Requested to rename unmapped client object", "username", oldname,
                    new Exception());
        return false;
    }
} | public class class_name {
protected boolean renameClientObject (Name oldname, Name newname)
{
ClientObject clobj = _objmap.remove(oldname);
if (clobj == null) {
log.warning("Requested to rename unmapped client object", "username", oldname,
new Exception()); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
_objmap.put(newname, clobj);
return true;
} } |
public class class_name {
    /**
     * Extracts the {@code name} parameter from a {@code Content-Disposition}
     * header value of a {@code form-data} part.
     *
     * @param contentDisposition the raw header value, may be null
     * @return the trimmed field name, or null when the header is absent,
     *         not form-data, or carries no name parameter
     */
    private String getFieldName(String contentDisposition) {
        String fieldName = null;
        // Use a locale-independent case-insensitive prefix test instead of
        // toLowerCase(): default-locale lowering is broken under e.g. the
        // Turkish locale ("I" lowers to a dotless i and no longer matches).
        if (contentDisposition != null
                && contentDisposition.regionMatches(true, 0, FORM_DATA, 0, FORM_DATA.length())) {
            ParameterParser parser = new ParameterParser();
            parser.setLowerCaseNames(true);
            // parameter parser can handle null input
            Map<String, String> params = parser.parse(contentDisposition, ';');
            fieldName = params.get("name");
            if (fieldName != null) {
                fieldName = fieldName.trim();
            }
        }
        return fieldName;
    }
} | public class class_name {
private String getFieldName(String contentDisposition) {
String fieldName = null;
if (contentDisposition != null && contentDisposition.toLowerCase().startsWith(FORM_DATA)) {
ParameterParser parser = new ParameterParser();
parser.setLowerCaseNames(true); // depends on control dependency: [if], data = [none]
// parameter parser can handle null input
Map<String,String> params = parser.parse(contentDisposition, ';');
fieldName = (String) params.get("name"); // depends on control dependency: [if], data = [none]
if (fieldName != null) {
fieldName = fieldName.trim(); // depends on control dependency: [if], data = [none]
}
}
return fieldName;
} } |
public class class_name {
/**
 * ANTLR-generated parser rule for the Xbase XOrExpression grammar rule
 * (see the InternalXbase.g positions in the inline comments).
 * Parses a left-associative chain: an XAndExpression optionally followed by
 * repeated (OpOr XAndExpression) pairs, each folded into an XBinaryOperation
 * whose left operand is the expression parsed so far.
 * <p>
 * NOTE(review): generated code — do not edit by hand; regenerate from the
 * grammar instead. Model-building actions only run when not backtracking
 * (state.backtracking==0).
 *
 * @return the parsed EObject model element (may be the partial result when
 *         a parse step fails while backtracking)
 * @throws RecognitionException on unrecoverable parse errors
 */
public final EObject ruleXOrExpression() throws RecognitionException {
EObject current = null;
EObject this_XAndExpression_0 = null;
EObject lv_rightOperand_3_0 = null;
enterRule();
try {
// InternalXbase.g:353:2: ( (this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )* ) )
// InternalXbase.g:354:2: (this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )* )
{
// InternalXbase.g:354:2: (this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )* )
// InternalXbase.g:355:3: this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )*
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXOrExpressionAccess().getXAndExpressionParserRuleCall_0());
}
pushFollow(FOLLOW_9);
this_XAndExpression_0=ruleXAndExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XAndExpression_0;
afterParserOrEnumRuleCall();
}
// InternalXbase.g:363:3: ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )*
loop5:
do {
int alt5=2;
int LA5_0 = input.LA(1);
if ( (LA5_0==22) ) {
int LA5_2 = input.LA(2);
if ( (synpred2_InternalXbase()) ) {
alt5=1;
}
}
switch (alt5) {
case 1 :
// InternalXbase.g:364:4: ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) )
{
// InternalXbase.g:364:4: ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) )
// InternalXbase.g:365:5: ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) )
{
// InternalXbase.g:375:5: ( () ( ( ruleOpOr ) ) )
// InternalXbase.g:376:6: () ( ( ruleOpOr ) )
{
// InternalXbase.g:376:6: ()
// InternalXbase.g:377:7:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElementAndSet(
grammarAccess.getXOrExpressionAccess().getXBinaryOperationLeftOperandAction_1_0_0_0(),
current);
}
}
// InternalXbase.g:383:6: ( ( ruleOpOr ) )
// InternalXbase.g:384:7: ( ruleOpOr )
{
// InternalXbase.g:384:7: ( ruleOpOr )
// InternalXbase.g:385:8: ruleOpOr
{
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXOrExpressionRule());
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXOrExpressionAccess().getFeatureJvmIdentifiableElementCrossReference_1_0_0_1_0());
}
pushFollow(FOLLOW_4);
ruleOpOr();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall();
}
}
}
}
}
// InternalXbase.g:401:4: ( (lv_rightOperand_3_0= ruleXAndExpression ) )
// InternalXbase.g:402:5: (lv_rightOperand_3_0= ruleXAndExpression )
{
// InternalXbase.g:402:5: (lv_rightOperand_3_0= ruleXAndExpression )
// InternalXbase.g:403:6: lv_rightOperand_3_0= ruleXAndExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXOrExpressionAccess().getRightOperandXAndExpressionParserRuleCall_1_1_0());
}
pushFollow(FOLLOW_9);
lv_rightOperand_3_0=ruleXAndExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXOrExpressionRule());
}
set(
current,
"rightOperand",
lv_rightOperand_3_0,
"org.eclipse.xtext.xbase.Xbase.XAndExpression");
afterParserOrEnumRuleCall();
}
}
}
}
break;
default :
break loop5;
}
} while (true);
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} } | public class class_name {
public final EObject ruleXOrExpression() throws RecognitionException {
EObject current = null;
EObject this_XAndExpression_0 = null;
EObject lv_rightOperand_3_0 = null;
enterRule();
try {
// InternalXbase.g:353:2: ( (this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )* ) )
// InternalXbase.g:354:2: (this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )* )
{
// InternalXbase.g:354:2: (this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )* )
// InternalXbase.g:355:3: this_XAndExpression_0= ruleXAndExpression ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )*
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXOrExpressionAccess().getXAndExpressionParserRuleCall_0()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_9);
this_XAndExpression_0=ruleXAndExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XAndExpression_0; // depends on control dependency: [if], data = [none]
afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none]
}
// InternalXbase.g:363:3: ( ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) ) )*
loop5:
do {
int alt5=2;
int LA5_0 = input.LA(1);
if ( (LA5_0==22) ) {
int LA5_2 = input.LA(2);
if ( (synpred2_InternalXbase()) ) {
alt5=1; // depends on control dependency: [if], data = [none]
}
}
switch (alt5) {
case 1 :
// InternalXbase.g:364:4: ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) ) ( (lv_rightOperand_3_0= ruleXAndExpression ) )
{
// InternalXbase.g:364:4: ( ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) ) )
// InternalXbase.g:365:5: ( ( () ( ( ruleOpOr ) ) ) )=> ( () ( ( ruleOpOr ) ) )
{
// InternalXbase.g:375:5: ( () ( ( ruleOpOr ) ) )
// InternalXbase.g:376:6: () ( ( ruleOpOr ) )
{
// InternalXbase.g:376:6: ()
// InternalXbase.g:377:7:
{
if ( state.backtracking==0 ) {
current = forceCreateModelElementAndSet(
grammarAccess.getXOrExpressionAccess().getXBinaryOperationLeftOperandAction_1_0_0_0(),
current); // depends on control dependency: [if], data = [none]
}
}
// InternalXbase.g:383:6: ( ( ruleOpOr ) )
// InternalXbase.g:384:7: ( ruleOpOr )
{
// InternalXbase.g:384:7: ( ruleOpOr )
// InternalXbase.g:385:8: ruleOpOr
{
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElement(grammarAccess.getXOrExpressionRule()); // depends on control dependency: [if], data = [none]
}
}
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXOrExpressionAccess().getFeatureJvmIdentifiableElementCrossReference_1_0_0_1_0()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_4);
ruleOpOr();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none]
}
}
}
}
}
// InternalXbase.g:401:4: ( (lv_rightOperand_3_0= ruleXAndExpression ) )
// InternalXbase.g:402:5: (lv_rightOperand_3_0= ruleXAndExpression )
{
// InternalXbase.g:402:5: (lv_rightOperand_3_0= ruleXAndExpression )
// InternalXbase.g:403:6: lv_rightOperand_3_0= ruleXAndExpression
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getXOrExpressionAccess().getRightOperandXAndExpressionParserRuleCall_1_1_0()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_9);
lv_rightOperand_3_0=ruleXAndExpression();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
if (current==null) {
current = createModelElementForParent(grammarAccess.getXOrExpressionRule()); // depends on control dependency: [if], data = [none]
}
set(
current,
"rightOperand",
lv_rightOperand_3_0,
"org.eclipse.xtext.xbase.Xbase.XAndExpression"); // depends on control dependency: [if], data = [none]
afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none]
}
}
}
}
break;
default :
break loop5;
}
} while (true);
}
}
if ( state.backtracking==0 ) {
leaveRule(); // depends on control dependency: [if], data = [none]
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} } |
public class class_name {
    /**
     * Asserts that two floats are NOT equal within the given delta; fails with
     * the supplied message when they are (i.e. when they are not different).
     *
     * @param message the failure message prefix
     * @param unexpected the value {@code actual} must differ from
     * @param actual the value under test
     * @param delta the tolerance within which the values count as equal
     */
    public static void assertNotEquals(String message, float unexpected,
            float actual, float delta) {
        final boolean different = floatIsDifferent(unexpected, actual, delta);
        if (!different) {
            failEquals(message, actual);
        }
    }
} | public class class_name {
public static void assertNotEquals(String message, float unexpected,
float actual, float delta) {
if (!floatIsDifferent(unexpected, actual, delta)) {
failEquals(message, actual); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Builds and returns the service call that lists the entities of a workspace.
     * Optional query parameters are only appended when explicitly set on the
     * options object.
     *
     * @param listEntitiesOptions the request options; must not be null
     * @return the service call yielding an {@link EntityCollection}
     */
    public ServiceCall<EntityCollection> listEntities(ListEntitiesOptions listEntitiesOptions) {
        Validator.notNull(listEntitiesOptions, "listEntitiesOptions cannot be null");
        String[] segments = { "v1/workspaces", "entities" };
        String[] parameters = { listEntitiesOptions.workspaceId() };
        RequestBuilder requestBuilder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), segments,
                parameters));
        requestBuilder.query("version", versionDate);
        // attach the default SDK headers for this operation
        Map<String, String> defaultHeaders = SdkCommon.getSdkHeaders("conversation", "v1", "listEntities");
        for (Entry<String, String> defaultHeader : defaultHeaders.entrySet()) {
            requestBuilder.header(defaultHeader.getKey(), defaultHeader.getValue());
        }
        requestBuilder.header("Accept", "application/json");
        if (listEntitiesOptions.export() != null) {
            requestBuilder.query("export", String.valueOf(listEntitiesOptions.export()));
        }
        if (listEntitiesOptions.pageLimit() != null) {
            requestBuilder.query("page_limit", String.valueOf(listEntitiesOptions.pageLimit()));
        }
        if (listEntitiesOptions.includeCount() != null) {
            requestBuilder.query("include_count", String.valueOf(listEntitiesOptions.includeCount()));
        }
        if (listEntitiesOptions.sort() != null) {
            requestBuilder.query("sort", listEntitiesOptions.sort());
        }
        if (listEntitiesOptions.cursor() != null) {
            requestBuilder.query("cursor", listEntitiesOptions.cursor());
        }
        if (listEntitiesOptions.includeAudit() != null) {
            requestBuilder.query("include_audit", String.valueOf(listEntitiesOptions.includeAudit()));
        }
        return createServiceCall(requestBuilder.build(), ResponseConverterUtils.getObject(EntityCollection.class));
    }
} | public class class_name {
public ServiceCall<EntityCollection> listEntities(ListEntitiesOptions listEntitiesOptions) {
Validator.notNull(listEntitiesOptions, "listEntitiesOptions cannot be null");
String[] pathSegments = { "v1/workspaces", "entities" };
String[] pathParameters = { listEntitiesOptions.workspaceId() };
RequestBuilder builder = RequestBuilder.get(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments,
pathParameters));
builder.query("version", versionDate);
Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("conversation", "v1", "listEntities");
for (Entry<String, String> header : sdkHeaders.entrySet()) {
builder.header(header.getKey(), header.getValue()); // depends on control dependency: [for], data = [header]
}
builder.header("Accept", "application/json");
if (listEntitiesOptions.export() != null) {
builder.query("export", String.valueOf(listEntitiesOptions.export())); // depends on control dependency: [if], data = [(listEntitiesOptions.export()]
}
if (listEntitiesOptions.pageLimit() != null) {
builder.query("page_limit", String.valueOf(listEntitiesOptions.pageLimit())); // depends on control dependency: [if], data = [(listEntitiesOptions.pageLimit()]
}
if (listEntitiesOptions.includeCount() != null) {
builder.query("include_count", String.valueOf(listEntitiesOptions.includeCount())); // depends on control dependency: [if], data = [(listEntitiesOptions.includeCount()]
}
if (listEntitiesOptions.sort() != null) {
builder.query("sort", listEntitiesOptions.sort()); // depends on control dependency: [if], data = [none]
}
if (listEntitiesOptions.cursor() != null) {
builder.query("cursor", listEntitiesOptions.cursor()); // depends on control dependency: [if], data = [none]
}
if (listEntitiesOptions.includeAudit() != null) {
builder.query("include_audit", String.valueOf(listEntitiesOptions.includeAudit())); // depends on control dependency: [if], data = [(listEntitiesOptions.includeAudit()]
}
return createServiceCall(builder.build(), ResponseConverterUtils.getObject(EntityCollection.class));
} } |
public class class_name {
    /**
     * Schedules the root topic of the focused mind-map editor to be centred on
     * the Swing event thread.
     *
     * @return true when the focused tab holds a mind-map editor and the centring
     *         was scheduled, false otherwise
     */
    @Override
    public boolean centerRootTopicIfFocusedMMD() {
        final TabTitle focused = this.getFocusedTab();
        if (focused == null
                || focused.getProvider().getEditor().getEditorContentType() != EditorContentType.MINDMAP) {
            return false;
        }
        // centring must happen on the EDT; defer it with invokeLater
        SwingUtilities.invokeLater(new Runnable() {
            @Override
            public void run() {
                ((MMDEditor) focused.getProvider().getEditor()).rootToCentre();
            }
        });
        return true;
    }
} | public class class_name {
@Override
public boolean centerRootTopicIfFocusedMMD() {
boolean result = false;
final TabTitle title = this.getFocusedTab();
if (title != null && title.getProvider().getEditor().getEditorContentType() == EditorContentType.MINDMAP) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
((MMDEditor) title.getProvider().getEditor()).rootToCentre();
}
}); // depends on control dependency: [if], data = [none]
result = true; // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
    /**
     * Appends the first point of every pair to {@code view1} and the second
     * point to {@code view2}, preserving pair order.
     *
     * @param pairs the associated pairs to split
     * @param view1 receives each pair's {@code p1}
     * @param view2 receives each pair's {@code p2}
     */
    public static void splitAssociated( List<AssociatedPair> pairs ,
            List<Point2D_F64> view1 , List<Point2D_F64> view2 ) {
        for (int i = 0; i < pairs.size(); i++) {
            AssociatedPair pair = pairs.get(i);
            view1.add(pair.p1);
            view2.add(pair.p2);
        }
    }
} | public class class_name {
public static void splitAssociated( List<AssociatedPair> pairs ,
List<Point2D_F64> view1 , List<Point2D_F64> view2 ) {
for( AssociatedPair p : pairs ) {
view1.add(p.p1); // depends on control dependency: [for], data = [p]
view2.add(p.p2); // depends on control dependency: [for], data = [p]
}
} } |
public class class_name {
    /**
     * Generates a filter condition for a binary property value, encoding the
     * bytes as a lowercase hexadecimal string.
     *
     * @param propertyName the property to filter on
     * @param operation the comparison operation
     * @param value the binary value to encode
     * @return the filter condition string for the hex-encoded value
     */
    public static String generateFilterCondition(String propertyName, String operation, final byte[] value) {
        // two hex chars per byte
        StringBuilder sb = new StringBuilder(value.length * 2);
        // try-with-resources closes (and thereby flushes) the Formatter even if
        // formatting throws; the original leaked it on exception
        try (Formatter formatter = new Formatter(sb)) {
            for (byte b : value) {
                formatter.format("%02x", b);
            }
        }
        return generateFilterCondition(propertyName, operation, sb.toString(), EdmType.BINARY);
    }
} | public class class_name {
public static String generateFilterCondition(String propertyName, String operation, final byte[] value) {
StringBuilder sb = new StringBuilder();
Formatter formatter = new Formatter(sb);
for (byte b : value) {
formatter.format("%02x", b); // depends on control dependency: [for], data = [b]
}
formatter.flush();
formatter.close();
return generateFilterCondition(propertyName, operation, sb.toString(), EdmType.BINARY);
} } |
public class class_name {
    /**
     * Processes an initial block report in parallel by sharding the report
     * across the given executor, with at most
     * {@code namesystem.parallelProcessingThreads} shards of roughly
     * {@code parallelBRblocksPerShard} blocks each. When absent-block retry is
     * enabled, blocks returned by the workers are added to {@code toRetry}.
     *
     * @param namesystem the namesystem configuration and name node accessor
     * @param toRetry receives blocks to retry when retry is enabled
     * @param newReport the block report to process
     * @param node the reporting datanode
     * @param initialBlockReportExecutor executor running the shard workers
     * @throws IOException when a worker fails or the wait is interrupted
     */
    static void processReport(FSNamesystem namesystem, Collection<Block> toRetry,
            BlockListAsLongs newReport, DatanodeDescriptor node,
            ExecutorService initialBlockReportExecutor) throws IOException {
        // spawn one thread for blocksPerShardBR blocks (ceiling division),
        // capped by the configured parallelism
        int numShards = Math
                .min(
                        namesystem.parallelProcessingThreads,
                        ((newReport.getNumberOfBlocks()
                                + namesystem.parallelBRblocksPerShard - 1) / namesystem.parallelBRblocksPerShard));
        List<Future<List<Block>>> workers = new ArrayList<Future<List<Block>>>(
                numShards);
        // submit one task per shard
        for (int i = 0; i < numShards; i++) {
            workers.add(initialBlockReportExecutor.submit(new InitialReportWorker(
                    newReport, i, numShards, node, namesystem.getNameNode()
                            .shouldRetryAbsentBlocks(), namesystem)));
        }
        // get results and add to retry list if needed
        try {
            for (Future<List<Block>> worker : workers) {
                if (namesystem.getNameNode().shouldRetryAbsentBlocks()) {
                    toRetry.addAll(worker.get());
                } else {
                    // still wait so worker failures surface as exceptions
                    worker.get();
                }
            }
        } catch (ExecutionException e) {
            LOG.warn("Parallel report failed", e);
            throw new IOException(e);
        } catch (InterruptedException e) {
            // restore the interrupt flag so callers can observe the interruption
            // (the original swallowed it, hiding the interrupt from the caller)
            Thread.currentThread().interrupt();
            throw new IOException("Interruption", e);
        }
    }
} | public class class_name {
static void processReport(FSNamesystem namesystem, Collection<Block> toRetry,
BlockListAsLongs newReport, DatanodeDescriptor node,
ExecutorService initialBlockReportExecutor) throws IOException {
// spawn one thread for blocksPerShardBR blocks
int numShards = Math
.min(
namesystem.parallelProcessingThreads,
((newReport.getNumberOfBlocks()
+ namesystem.parallelBRblocksPerShard - 1) / namesystem.parallelBRblocksPerShard));
List<Future<List<Block>>> workers = new ArrayList<Future<List<Block>>>(
numShards);
// submit tasks for execution
for (int i = 0; i < numShards; i++) {
workers.add(initialBlockReportExecutor.submit(new InitialReportWorker(
newReport, i, numShards, node, namesystem.getNameNode()
.shouldRetryAbsentBlocks(), namesystem)));
}
// get results and add to retry list if need
try {
for (Future<List<Block>> worker : workers) {
if (namesystem.getNameNode().shouldRetryAbsentBlocks()) {
toRetry.addAll(worker.get()); // depends on control dependency: [if], data = [none]
} else {
worker.get(); // depends on control dependency: [if], data = [none]
}
}
} catch (ExecutionException e) {
LOG.warn("Parallel report failed", e);
throw new IOException(e);
} catch (InterruptedException e) {
throw new IOException("Interruption", e);
}
} } |
public class class_name {
protected void showBootLogging() {
if (logger.isInfoEnabled()) {
logger.info("[Objective Config]");
logger.info(" " + appResource + " extends " + extendsResourceList);
final boolean checkImplicitOverride = prop.isCheckImplicitOverride();
final int count = prop.getJavaPropertiesResult().getPropertyList().size();
logger.info(" checkImplicitOverride=" + checkImplicitOverride + ", propertyCount=" + count);
if (bowgunPropertyFilter != null) {
logger.info(" bowgun=" + bowgunPropertyFilter); // because of important
}
// *no logging of all property values because it might contain security info
}
} } | public class class_name {
protected void showBootLogging() {
if (logger.isInfoEnabled()) {
logger.info("[Objective Config]"); // depends on control dependency: [if], data = [none]
logger.info(" " + appResource + " extends " + extendsResourceList); // depends on control dependency: [if], data = [none]
final boolean checkImplicitOverride = prop.isCheckImplicitOverride();
final int count = prop.getJavaPropertiesResult().getPropertyList().size();
logger.info(" checkImplicitOverride=" + checkImplicitOverride + ", propertyCount=" + count); // depends on control dependency: [if], data = [none]
if (bowgunPropertyFilter != null) {
logger.info(" bowgun=" + bowgunPropertyFilter); // because of important // depends on control dependency: [if], data = [none]
}
// *no logging of all property values because it might contain security info
}
} } |
public class class_name {
    /**
     * Updates the given system command policies in bulk. Only the policyname,
     * action and cmdspec fields are sent.
     *
     * @param client the service to send the bulk update through
     * @param resources the policies to update
     * @return the bulk responses, or null when {@code resources} is null or empty
     * @throws Exception when the bulk request fails
     */
    public static base_responses update(nitro_service client, systemcmdpolicy resources[]) throws Exception {
        if (resources == null || resources.length == 0) {
            return null;
        }
        systemcmdpolicy payload[] = new systemcmdpolicy[resources.length];
        for (int index = 0; index < resources.length; index++) {
            // copy only the updatable fields into a fresh request object
            systemcmdpolicy copy = new systemcmdpolicy();
            copy.policyname = resources[index].policyname;
            copy.action = resources[index].action;
            copy.cmdspec = resources[index].cmdspec;
            payload[index] = copy;
        }
        return update_bulk_request(client, payload);
    }
} | public class class_name {
public static base_responses update(nitro_service client, systemcmdpolicy resources[]) throws Exception {
base_responses result = null;
if (resources != null && resources.length > 0) {
systemcmdpolicy updateresources[] = new systemcmdpolicy[resources.length];
for (int i=0;i<resources.length;i++){
updateresources[i] = new systemcmdpolicy(); // depends on control dependency: [for], data = [i]
updateresources[i].policyname = resources[i].policyname; // depends on control dependency: [for], data = [i]
updateresources[i].action = resources[i].action; // depends on control dependency: [for], data = [i]
updateresources[i].cmdspec = resources[i].cmdspec; // depends on control dependency: [for], data = [i]
}
result = update_bulk_request(client, updateresources);
}
return result;
} } |
public class class_name {
    /**
     * Stores a defensive copy of the given violation events.
     * Passing {@code null} clears the field to {@code null}.
     *
     * @param violationEvents the events to store, or {@code null} to clear
     */
    public void setViolationEvents(java.util.Collection<ViolationEvent> violationEvents) {
        this.violationEvents = (violationEvents == null)
                ? null
                : new java.util.ArrayList<ViolationEvent>(violationEvents);
    }
} | public class class_name {
public void setViolationEvents(java.util.Collection<ViolationEvent> violationEvents) {
if (violationEvents == null) {
this.violationEvents = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.violationEvents = new java.util.ArrayList<ViolationEvent>(violationEvents);
} } |
public class class_name {
    /**
     * Compares {@code length} bytes of {@code a} starting at {@code a_offset}
     * against bytes of {@code b} starting at {@code b_offset} (an equality-only
     * analogue of C's memcmp). Two null arrays compare equal; exactly one null
     * array never does. A zero length always compares equal.
     *
     * @param a first array, may be null
     * @param a_offset start index into {@code a}
     * @param b second array, may be null
     * @param b_offset start index into {@code b}
     * @param length number of bytes to compare
     * @return true when the two ranges hold identical bytes
     */
    public static boolean memcmp( byte[] a, int a_offset, byte[] b, int b_offset, int length ) {
        if (a == null || b == null) {
            // equal only when BOTH references are null
            return a == b;
        }
        for (int i = 0; i < length; i++) {
            if (a[a_offset + i] != b[b_offset + i]) {
                return false;
            }
        }
        return true;
    }
} | public class class_name {
public static boolean memcmp( byte[] a, int a_offset, byte[] b, int b_offset, int length ) {
if ((a == null) && (b == null)) {
return true; // depends on control dependency: [if], data = [none]
}
if ((a == null) || (b == null)) {
return false; // depends on control dependency: [if], data = [none]
}
for( int i = 0; i < length; ++i, ++a_offset, ++b_offset )
if (a[a_offset] != b[b_offset]) {
return false; // depends on control dependency: [if], data = [none]
}
return true;
} } |
public class class_name {
    /**
     * Returns the first existing {@code map-key-attribute-override} child as an
     * AttributeOverride, creating a new one when none exists yet.
     *
     * @return the existing or newly created attribute override
     */
    public AttributeOverride<ManyToMany<T>> getOrCreateMapKeyAttributeOverride()
    {
        final List<Node> existing = childNode.get("map-key-attribute-override");
        if (existing == null || existing.isEmpty())
        {
            return createMapKeyAttributeOverride();
        }
        return new AttributeOverrideImpl<ManyToMany<T>>(this, "map-key-attribute-override", childNode, existing.get(0));
    }
} | public class class_name {
public AttributeOverride<ManyToMany<T>> getOrCreateMapKeyAttributeOverride()
{
List<Node> nodeList = childNode.get("map-key-attribute-override");
if (nodeList != null && nodeList.size() > 0)
{
return new AttributeOverrideImpl<ManyToMany<T>>(this, "map-key-attribute-override", childNode, nodeList.get(0)); // depends on control dependency: [if], data = [none]
}
return createMapKeyAttributeOverride();
} } |
public class class_name {
    /**
     * Micro-benchmark comparing StringDecoder and CharsetDecoder throughput on a
     * fixed UTF-8 byte sequence, then echoing the bytes through a
     * WriterOutputStream both as a bulk write and byte-by-byte.
     * <p>
     * The duplicated warm-up/measure/verify sequence of the original was
     * extracted into {@link #runBenchmark}; console output is unchanged.
     *
     * @param pArgs unused
     * @throws IOException when decoding or writing fails
     */
    public static void main(String[] pArgs) throws IOException {
        int iterations = 1000000;
        byte[] bytes = "������ klashf lkash ljah lhaaklhghdfgu ksd".getBytes("UTF-8");
        Writer sink = new PrintWriter(new NullOutputStream());
        runBenchmark("StringDecoder", new StringDecoder("UTF-8"), bytes, iterations, sink);
        if (NIO_AVAILABLE) {
            runBenchmark("CharsetDecoder", new CharsetDecoder("UTF-8"), bytes, iterations, sink);
        }
        OutputStream os = new WriterOutputStream(new PrintWriter(System.out), "UTF-8");
        os.write(bytes);
        os.flush();
        System.out.println();
        // write the same bytes one at a time to exercise the single-byte path
        for (byte b : bytes) {
            os.write(b & 0xff);
        }
        os.flush();
    }

    /**
     * Warms up the decoder, times {@code iterations} decode passes into
     * {@code sink}, and prints the elapsed time plus a decoded sample.
     */
    private static void runBenchmark(String label, Decoder d, byte[] bytes, int iterations, Writer sink)
            throws IOException {
        // warm-up pass so JIT compilation does not skew the measurement
        for (int i = 0; i < 10000; i++) {
            d.decodeTo(sink, bytes, 0, bytes.length);
        }
        long start = System.currentTimeMillis();
        for (int i = 0; i < iterations; i++) {
            d.decodeTo(sink, bytes, 0, bytes.length);
        }
        long time = DateUtil.delta(start);
        System.out.println(label);
        System.out.println("time: " + time);
        StringWriter writer = new StringWriter();
        d.decodeTo(writer, bytes, 0, bytes.length);
        String str = writer.toString();
        System.out.println("str: \"" + str + "\"");
        System.out.println("chars.length: " + str.length());
        System.out.println();
    }
} | public class class_name {
public static void main(String[] pArgs) throws IOException {
int iterations = 1000000;
byte[] bytes = "������ klashf lkash ljah lhaaklhghdfgu ksd".getBytes("UTF-8");
Decoder d;
long start;
long time;
Writer sink = new PrintWriter(new NullOutputStream());
StringWriter writer;
String str;
d = new StringDecoder("UTF-8");
for (int i = 0; i < 10000; i++) {
d.decodeTo(sink, bytes, 0, bytes.length);
}
start = System.currentTimeMillis();
for (int i = 0; i < iterations; i++) {
d.decodeTo(sink, bytes, 0, bytes.length);
}
time = DateUtil.delta(start);
System.out.println("StringDecoder");
System.out.println("time: " + time);
writer = new StringWriter();
d.decodeTo(writer, bytes, 0, bytes.length);
str = writer.toString();
System.out.println("str: \"" + str + "\"");
System.out.println("chars.length: " + str.length());
System.out.println();
if (NIO_AVAILABLE) {
d = new CharsetDecoder("UTF-8");
for (int i = 0; i < 10000; i++) {
d.decodeTo(sink, bytes, 0, bytes.length);
// depends on control dependency: [for], data = [none]
}
start = System.currentTimeMillis();
for (int i = 0; i < iterations; i++) {
d.decodeTo(sink, bytes, 0, bytes.length);
// depends on control dependency: [for], data = [none]
}
time = DateUtil.delta(start);
System.out.println("CharsetDecoder");
System.out.println("time: " + time);
writer = new StringWriter();
d.decodeTo(writer, bytes, 0, bytes.length);
str = writer.toString();
System.out.println("str: \"" + str + "\"");
System.out.println("chars.length: " + str.length());
System.out.println();
}
OutputStream os = new WriterOutputStream(new PrintWriter(System.out), "UTF-8");
os.write(bytes);
os.flush();
System.out.println();
for (byte b : bytes) {
os.write(b & 0xff);
}
os.flush();
} } |
public class class_name {
    /**
     * Appends the given bundle IDs to this request, lazily creating the
     * backing list on first use.
     *
     * @param bundleIds bundle IDs to append
     * @return this request, so calls can be chained
     */
    public DescribeWorkspaceBundlesRequest withBundleIds(String... bundleIds) {
        if (this.bundleIds == null) {
            setBundleIds(new com.amazonaws.internal.SdkInternalList<String>(bundleIds.length));
        }
        // Bulk-append instead of the element-by-element loop; same effect.
        java.util.Collections.addAll(this.bundleIds, bundleIds);
        return this;
} } | public class class_name {
public DescribeWorkspaceBundlesRequest withBundleIds(String... bundleIds) {
if (this.bundleIds == null) {
setBundleIds(new com.amazonaws.internal.SdkInternalList<String>(bundleIds.length)); // depends on control dependency: [if], data = [none]
}
for (String ele : bundleIds) {
this.bundleIds.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
    /**
     * Switches the queue's error handling mode.
     *
     * Requires that a lock path has already been configured; selecting
     * REQUEUE logs a warning because that mode needs ZooKeeper 3.4.x+.
     *
     * @param newErrorMode mode to switch to
     */
    @Override
    public void setErrorMode(ErrorMode newErrorMode) {
        Preconditions.checkNotNull(lockPath, "lockPath cannot be null");
        if (newErrorMode == ErrorMode.REQUEUE) {
            log.warn("ErrorMode.REQUEUE requires ZooKeeper version 3.4.x+ - make sure you are not using a prior version");
        }
        errorMode.set(newErrorMode);
} } | public class class_name {
@Override
public void setErrorMode(ErrorMode newErrorMode)
{
Preconditions.checkNotNull(lockPath, "lockPath cannot be null");
if ( newErrorMode == ErrorMode.REQUEUE )
{
log.warn("ErrorMode.REQUEUE requires ZooKeeper version 3.4.x+ - make sure you are not using a prior version"); // depends on control dependency: [if], data = [none]
}
errorMode.set(newErrorMode);
} } |
public class class_name {
    /**
     * Writes an exception record to the trace stream: a starred banner, a
     * single header line (level, timestamp, thread, class, method, message)
     * and the full stack trace.
     *
     * @param logLevel   severity to stamp on the record
     * @param throwable  exception to log; a null message is rendered as "No message."
     * @param clazz      class in which the exception was caught
     * @param methodName method in which the exception was caught
     */
    public void logException(LogLevel logLevel, Throwable throwable, Class<?> clazz, String methodName) {
        Date timeStamp = new Date();
        // Banner sized to the level name plus the surrounding "* " / " *" padding.
        char[] border = new char[logLevel.toString().length() + 4];
        Arrays.fill(border, '*');
        String message;
        if (throwable.getMessage() != null) {
            // Collapse multi-line messages onto one line so records stay grep-able.
            message = throwable.getMessage().trim();
            message = message.replace(System.getProperty("line.separator"), " => ");
        }
        else {
            message = "No message.";
        }
        // Serialize writers so concurrent records do not interleave.
        synchronized (this.syncObject) {
            this.tracePrintStream.println(border);
            this.tracePrintStream.printf("* %s * [%tc] [%d,%s] [%s] [%s] \"%s\"%n", logLevel.toString(), timeStamp, Thread.currentThread().getId(),
                    Thread.currentThread().getName(), clazz.getName(), methodName, message);
            this.tracePrintStream.println(border);
            throwable.printStackTrace(this.tracePrintStream);
        }
} } | public class class_name {
public void logException(LogLevel logLevel, Throwable throwable, Class clazz, String methodName) {
Date timeStamp = new Date();
char border[] = new char[logLevel.toString().length() + 4];
Arrays.fill(border, '*');
String message;
if (throwable.getMessage() != null) {
message = throwable.getMessage().trim();
// depends on control dependency: [if], data = [none]
message = message.replace(System.getProperty("line.separator"), " => ");
}
else {
message = "No message.";
}
synchronized (this.syncObject) {
this.tracePrintStream.println(border);
this.tracePrintStream.printf("* %s * [%tc] [%d,%s] [%s] [%s] \"%s\"%n", logLevel.toString(), timeStamp, Thread.currentThread().getId(),
Thread.currentThread().getName(), clazz.getName(), methodName, message);
// depends on control dependency: [if], data = [none]
this.tracePrintStream.println(border);
// depends on control dependency: [if], data = [none]
throwable.printStackTrace(this.tracePrintStream);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
    /**
     * Requests shutdown of the shared pool and blocks until it terminates,
     * escalating to shutdownNow() after 60 seconds and logging a warning if
     * the pool still has not stopped after a further 60 seconds. Mirrors the
     * standard ExecutorService shutdown recipe.
     */
    public static void shutdownAndAwaitTermination() {
        shutdown();
        if (pool == null) {
            return;
        }
        try {
            if (pool.awaitTermination(60L, TimeUnit.SECONDS)) {
                return; // orderly shutdown finished in time
            }
            pool.shutdownNow(); // cancel currently executing tasks
            // wait a while for tasks to respond to being canceled
            if (!pool.awaitTermination(60L, TimeUnit.SECONDS)) {
                logger.warn("BioJava ConcurrencyTools thread pool did not terminate");
            }
        } catch (InterruptedException ie) {
            pool.shutdownNow(); // (re-)cancel if current thread also interrupted
            Thread.currentThread().interrupt(); // preserve interrupt status
        }
} } | public class class_name {
public static void shutdownAndAwaitTermination() {
shutdown();
if (pool != null) {
try {
// wait a while for existing tasks to terminate
if (!pool.awaitTermination(60L, TimeUnit.SECONDS)) {
pool.shutdownNow(); // cancel currently executing tasks // depends on control dependency: [if], data = [none]
// wait a while for tasks to respond to being canceled
if (!pool.awaitTermination(60L, TimeUnit.SECONDS)) {
logger.warn("BioJava ConcurrencyTools thread pool did not terminate"); // depends on control dependency: [if], data = [none]
}
}
} catch (InterruptedException ie) {
pool.shutdownNow(); // (re-)cancel if current thread also interrupted
Thread.currentThread().interrupt(); // preserve interrupt status
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
    /**
     * Writes up to {@code max} words of this run-length-encoded word stream
     * into {@code container}, consuming the stream as it goes.
     *
     * Each iteration emits the current running (empty) words followed by the
     * current literal words; when the remaining budget is smaller than either
     * part, only the fitting prefix is emitted and the internal cursor state
     * ({@code this.brlw}, {@code this.literalWordStartPosition}) is updated so
     * a later call can resume where this one stopped.
     *
     * @param container sink receiving the words
     * @param max       maximum number of words to write
     * @return {@code max} if the limit was reached, otherwise the total number
     *         of words written before the stream was exhausted
     */
    public long discharge(BitmapStorage container, long max) {
        long index = 0;
        while (true) {
            if (index + getRunningLength() > max) {
                // Budget ends inside the run: emit only the fitting prefix and
                // shrink the stored running length by what was consumed.
                final int offset = (int) (max - index);
                container.addStreamOfEmptyWords(getRunningBit(), offset);
                this.brlw.runningLength -= offset;
                return max;
            }
            container.addStreamOfEmptyWords(getRunningBit(), getRunningLength());
            index += getRunningLength();
            if (getNumberOfLiteralWords() + index > max) {
                // Budget ends inside the literal words: write the fitting prefix
                // and advance the literal cursor past what was consumed.
                final int offset =(int) (max - index);
                writeLiteralWords(offset, container);
                this.brlw.runningLength = 0;
                this.brlw.numberOfLiteralWords -= offset;
                this.literalWordStartPosition += offset;
                return max;
            }
            writeLiteralWords(getNumberOfLiteralWords(), container);
            index += getNumberOfLiteralWords();
            if(!next()) break; // stream exhausted before reaching max
        }
        return index;
} } | public class class_name {
public long discharge(BitmapStorage container, long max) {
long index = 0;
while (true) {
if (index + getRunningLength() > max) {
final int offset = (int) (max - index);
container.addStreamOfEmptyWords(getRunningBit(), offset); // depends on control dependency: [if], data = [none]
this.brlw.runningLength -= offset; // depends on control dependency: [if], data = [none]
return max; // depends on control dependency: [if], data = [none]
}
container.addStreamOfEmptyWords(getRunningBit(), getRunningLength()); // depends on control dependency: [while], data = [none]
index += getRunningLength(); // depends on control dependency: [while], data = [none]
if (getNumberOfLiteralWords() + index > max) {
final int offset =(int) (max - index);
writeLiteralWords(offset, container); // depends on control dependency: [if], data = [none]
this.brlw.runningLength = 0; // depends on control dependency: [if], data = [none]
this.brlw.numberOfLiteralWords -= offset; // depends on control dependency: [if], data = [none]
this.literalWordStartPosition += offset; // depends on control dependency: [if], data = [none]
return max; // depends on control dependency: [if], data = [none]
}
writeLiteralWords(getNumberOfLiteralWords(), container); // depends on control dependency: [while], data = [none]
index += getNumberOfLiteralWords(); // depends on control dependency: [while], data = [none]
if(!next()) break;
}
return index;
} } |
public class class_name {
    /**
     * Copies each entry of {@code source} whose key names a known property of
     * {@code target}'s class onto the target via its setter; keys without a
     * matching property are silently ignored.
     *
     * @param target bean to update
     * @param source property-name to value map
     */
    private void updateProperties(T target, Map<String, Object> source) {
        Map<String, PropertyDescriptor> mapTargetProps = MappingUtils.mapPropertyDescriptors(target.getClass());
        // Iterate entries instead of keySet()+get() on the source (one pass,
        // no redundant lookups) and drop the redundant "== true" comparison.
        for (Map.Entry<String, Object> entry : source.entrySet()) {
            if (mapTargetProps.containsKey(entry.getKey())) {
                MappingUtils.callSetter(target, mapTargetProps.get(entry.getKey()), entry.getValue());
            }
        }
} } | public class class_name {
private void updateProperties(T target, Map<String, Object> source) {
Map<String, PropertyDescriptor> mapTargetProps = MappingUtils.mapPropertyDescriptors(target.getClass());
for (String sourceKey : source.keySet()) {
if (mapTargetProps.containsKey(sourceKey) == true) {
MappingUtils.callSetter(target, mapTargetProps.get(sourceKey), source.get(sourceKey)); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
    /**
     * Decodes a percent-encoded (application/x-www-form-urlencoded) string
     * using UTF-8; '+' decodes to a space.
     *
     * @param str encoded text
     * @return the decoded text
     * @throws IllegalStateException if the JVM lacks UTF-8 (cannot happen:
     *         UTF-8 support is mandated by the Java platform specification)
     */
    protected String decodePercent(String str) {
        try {
            // Canonical charset name; the original "UTF8" is only an alias.
            return URLDecoder.decode(str, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // Previously swallowed, silently returning null and inviting NPEs
            // downstream; fail loudly instead since this path is unreachable.
            throw new IllegalStateException("UTF-8 not supported", e);
        }
    }
}
protected String decodePercent(String str) {
String decoded = null;
try {
decoded = URLDecoder.decode(str, "UTF8"); // depends on control dependency: [try], data = [none]
} catch (UnsupportedEncodingException ignored) {
} // depends on control dependency: [catch], data = [none]
return decoded;
} } |
public class class_name {
    /**
     * Returns the pixel width of the first screen device that reports a
     * graphics configuration; falls back to the default screen width when no
     * device does.
     *
     * @return width in pixels of the first configured screen
     */
    public static int getFirstScreenWidth()
    {
        final GraphicsEnvironment graphicsEnvironment = GraphicsEnvironment.getLocalGraphicsEnvironment();
        final GraphicsDevice[] screens = graphicsEnvironment.getScreenDevices();
        // Default value, kept if no screen exposes a configuration.
        int width = getScreenWidth();
        for (final GraphicsDevice screen : screens)
        {
            final GraphicsConfiguration firstConfiguration = ArrayExtensions.getFirst(screen.getConfigurations());
            if (firstConfiguration != null)
            {
                // First screen with a configuration wins.
                width = (int) firstConfiguration.getBounds().getWidth();
                break;
            }
        }
        return width;
} } | public class class_name {
public static int getFirstScreenWidth()
{
final GraphicsEnvironment graphicsEnvironment = GraphicsEnvironment
.getLocalGraphicsEnvironment();
final GraphicsDevice[] graphicsDevices = graphicsEnvironment.getScreenDevices();
int width = getScreenWidth();
for (final GraphicsDevice graphicsDevice : graphicsDevices)
{
final GraphicsConfiguration[] graphicsConfigurations = graphicsDevice
.getConfigurations();
final GraphicsConfiguration graphicsConfiguration = ArrayExtensions
.getFirst(graphicsConfigurations);
if (graphicsConfiguration != null)
{
final Rectangle bounds = graphicsConfiguration.getBounds();
final double w = bounds.getWidth();
width = (int)w; // depends on control dependency: [if], data = [none]
break;
}
}
return width;
} } |
public class class_name {
    /**
     * Entry point: parses command-line flags (exiting on error or on -help),
     * optionally waits for data on stdin, then pretty-prints every journal
     * entry read from stdin, each followed by ENTRY_SEPARATOR.
     *
     * @param args command-line arguments
     * @throws IOException if reading the journal stream fails
     */
    public static void main(String[] args) throws IOException {
        if (!parseInputArgs(args)) {
            usage();
            System.exit(EXIT_FAILED);
        }
        if (sHelp) {
            usage();
            System.exit(EXIT_SUCCEEDED);
        }
        // Unless timeouts are disabled, bail out when stdin has nothing to read.
        if (!sNoTimeout && !stdinHasData()) {
            System.exit(EXIT_FAILED);
        }
        JournalFormatter formatter = new ProtoBufJournalFormatter();
        try (JournalInputStream journalStream = formatter.deserialize(System.in)) {
            for (JournalEntry entry = journalStream.read(); entry != null; entry = journalStream.read()) {
                System.out.print(entry);
                System.out.println(ENTRY_SEPARATOR);
            }
        }
} } | public class class_name {
public static void main(String[] args) throws IOException {
if (!parseInputArgs(args)) {
usage();
System.exit(EXIT_FAILED);
}
if (sHelp) {
usage();
System.exit(EXIT_SUCCEEDED);
}
if (!sNoTimeout && !stdinHasData()) {
System.exit(EXIT_FAILED);
}
JournalFormatter formatter = new ProtoBufJournalFormatter();
try (JournalInputStream journalStream = formatter.deserialize(System.in)) {
JournalEntry entry;
while ((entry = journalStream.read()) != null) {
System.out.print(entry); // depends on control dependency: [while], data = [none]
System.out.println(ENTRY_SEPARATOR); // depends on control dependency: [while], data = [none]
}
}
} } |
public class class_name {
    /**
     * Marshals every field of the given {@link DiskSnapshot} into the protocol
     * marshaller, one binding per field.
     *
     * @param diskSnapshot       value to marshall; must not be null
     * @param protocolMarshaller destination marshaller
     * @throws SdkClientException if {@code diskSnapshot} is null, or if any
     *         field fails to marshall (the original exception is kept as cause)
     */
    public void marshall(DiskSnapshot diskSnapshot, ProtocolMarshaller protocolMarshaller) {
        if (diskSnapshot == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(diskSnapshot.getName(), NAME_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getArn(), ARN_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getSupportCode(), SUPPORTCODE_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getCreatedAt(), CREATEDAT_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getLocation(), LOCATION_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getResourceType(), RESOURCETYPE_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getTags(), TAGS_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getSizeInGb(), SIZEINGB_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getState(), STATE_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getProgress(), PROGRESS_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getFromDiskName(), FROMDISKNAME_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getFromDiskArn(), FROMDISKARN_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getFromInstanceName(), FROMINSTANCENAME_BINDING);
            protocolMarshaller.marshall(diskSnapshot.getFromInstanceArn(), FROMINSTANCEARN_BINDING);
        } catch (Exception e) {
            // Wrap everything so callers see a single SDK exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
} } | public class class_name {
public void marshall(DiskSnapshot diskSnapshot, ProtocolMarshaller protocolMarshaller) {
if (diskSnapshot == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(diskSnapshot.getName(), NAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getArn(), ARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getSupportCode(), SUPPORTCODE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getCreatedAt(), CREATEDAT_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getLocation(), LOCATION_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getResourceType(), RESOURCETYPE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getTags(), TAGS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getSizeInGb(), SIZEINGB_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getState(), STATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getProgress(), PROGRESS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getFromDiskName(), FROMDISKNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getFromDiskArn(), FROMDISKARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getFromInstanceName(), FROMINSTANCENAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(diskSnapshot.getFromInstanceArn(), FROMINSTANCEARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Returns the log entries whose indices lie in the inclusive range
     * [fromEntryIndex, toEntryIndex].
     *
     * @throws IllegalArgumentException if the range is reversed, starts before
     *         the first stored entry, or extends past the last known index
     */
    public LogEntry[] getEntriesBetween(long fromEntryIndex, long toEntryIndex) {
        if (fromEntryIndex > toEntryIndex) {
            throw new IllegalArgumentException("Illegal from entry index: " + fromEntryIndex + ", to entry index: "
                    + toEntryIndex);
        }
        if (!containsLogEntry(fromEntryIndex)) {
            throw new IllegalArgumentException("Illegal from entry index: " + fromEntryIndex);
        }
        if (fromEntryIndex > lastLogOrSnapshotIndex()) {
            throw new IllegalArgumentException("Illegal from entry index: " + fromEntryIndex + ", last log index: "
                    + lastLogOrSnapshotIndex());
        }
        if (toEntryIndex > lastLogOrSnapshotIndex()) {
            throw new IllegalArgumentException("Illegal to entry index: " + toEntryIndex + ", last log index: "
                    + lastLogOrSnapshotIndex());
        }
        assert ((int) (toEntryIndex - fromEntryIndex)) >= 0 : "Int overflow! From: " + fromEntryIndex + ", to: " + toEntryIndex;
        final LogEntry[] result = new LogEntry[(int) (toEntryIndex - fromEntryIndex + 1)];
        final long startSequence = toSequence(fromEntryIndex);
        for (int slot = 0; slot < result.length; slot++) {
            result[slot] = logs.read(startSequence + slot);
        }
        return result;
} } | public class class_name {
public LogEntry[] getEntriesBetween(long fromEntryIndex, long toEntryIndex) {
if (fromEntryIndex > toEntryIndex) {
throw new IllegalArgumentException("Illegal from entry index: " + fromEntryIndex + ", to entry index: "
+ toEntryIndex);
}
if (!containsLogEntry(fromEntryIndex)) {
throw new IllegalArgumentException("Illegal from entry index: " + fromEntryIndex);
}
if (fromEntryIndex > lastLogOrSnapshotIndex()) {
throw new IllegalArgumentException("Illegal from entry index: " + fromEntryIndex + ", last log index: "
+ lastLogOrSnapshotIndex());
}
if (toEntryIndex > lastLogOrSnapshotIndex()) {
throw new IllegalArgumentException("Illegal to entry index: " + toEntryIndex + ", last log index: "
+ lastLogOrSnapshotIndex());
}
assert ((int) (toEntryIndex - fromEntryIndex)) >= 0 : "Int overflow! From: " + fromEntryIndex + ", to: " + toEntryIndex;
LogEntry[] entries = new LogEntry[(int) (toEntryIndex - fromEntryIndex + 1)];
long offset = toSequence(fromEntryIndex);
for (int i = 0; i < entries.length; i++) {
entries[i] = logs.read(offset + i); // depends on control dependency: [for], data = [i]
}
return entries;
} } |
public class class_name {
    /**
     * Unbinds from the bound service, logging (rather than propagating) any
     * failure so teardown always completes.
     */
    public void disconnect() {
        try {
            mContext.unbindService(mConnection);
        } catch (Exception ex) {
            final String tag = getClass().getSimpleName();
            Log.w(tag, "Exception unbinding from service: ", ex);
        }
} } | public class class_name {
public void disconnect() {
try {
mContext.unbindService(mConnection); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
Log.w(getClass().getSimpleName(), "Exception unbinding from service: ", e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
    /**
     * Copies {@code sourcePath} to {@code targetPath} (recursively when the
     * source is a directory) unless the target already exists; an existing
     * target is never overwritten.
     *
     * @param sourcePath path to copy from
     * @param targetPath path to copy to
     * @param executable whether copied files should be marked executable
     * @throws IOException on file system errors
     */
    public static void copy(Path sourcePath, Path targetPath, boolean executable) throws IOException {
        // we unwrap the file system to get raw streams without safety net
        FileSystem sourceFs = FileSystem.getUnguardedFileSystem(sourcePath.toUri());
        FileSystem targetFs = FileSystem.getUnguardedFileSystem(targetPath.toUri());
        if (targetFs.exists(targetPath)) {
            return; // never overwrite an existing target
        }
        if (sourceFs.getFileStatus(sourcePath).isDir()) {
            internalCopyDirectory(sourcePath, targetPath, executable, sourceFs, targetFs);
        } else {
            internalCopyFile(sourcePath, targetPath, executable, sourceFs, targetFs);
        }
} } | public class class_name {
public static void copy(Path sourcePath, Path targetPath, boolean executable) throws IOException {
// we unwrap the file system to get raw streams without safety net
FileSystem sFS = FileSystem.getUnguardedFileSystem(sourcePath.toUri());
FileSystem tFS = FileSystem.getUnguardedFileSystem(targetPath.toUri());
if (!tFS.exists(targetPath)) {
if (sFS.getFileStatus(sourcePath).isDir()) {
internalCopyDirectory(sourcePath, targetPath, executable, sFS, tFS); // depends on control dependency: [if], data = [none]
} else {
internalCopyFile(sourcePath, targetPath, executable, sFS, tFS); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
    /**
     * Scheduler loop: until shutdown is requested, sleeps while the queue is
     * empty, sleeps until the head job is due, executes due jobs, and
     * reschedules periodic jobs. On shutdown the job list is released to help
     * garbage collection.
     */
    public synchronized void run() {
        while (!shutdown) {
            if (jobList.isEmpty()) {
                // Nothing queued: wait until a job is scheduled.
                linger();
                continue;
            }
            ScheduledJobEntry entry = (ScheduledJobEntry) jobList.get(0);
            long now = System.currentTimeMillis();
            if (now < entry.desiredExecutionTime) {
                // Head job is not due yet; sleep until its execution time.
                linger(entry.desiredExecutionTime - now);
                continue;
            }
            executeInABox(entry.job);
            jobList.remove(0);
            if (entry.period > 0) {
                // Periodic job: put it back with its next execution time.
                entry.desiredExecutionTime = now + entry.period;
                schedule(entry);
            }
        }
        // clear out the job list to facilitate garbage collection
        jobList.clear();
        jobList = null;
        System.out.println("Leaving scheduler run method");
} } | public class class_name {
public synchronized void run() {
while (!shutdown) {
if (jobList.isEmpty()) {
linger(); // depends on control dependency: [if], data = [none]
} else {
ScheduledJobEntry sje = (ScheduledJobEntry) jobList.get(0);
long now = System.currentTimeMillis();
if (now >= sje.desiredExecutionTime) {
executeInABox(sje.job); // depends on control dependency: [if], data = [none]
jobList.remove(0); // depends on control dependency: [if], data = [none]
if (sje.period > 0) {
sje.desiredExecutionTime = now + sje.period; // depends on control dependency: [if], data = [none]
schedule(sje); // depends on control dependency: [if], data = [none]
}
} else {
linger(sje.desiredExecutionTime - now); // depends on control dependency: [if], data = [none]
}
}
}
// clear out the job list to facilitate garbage collection
jobList.clear();
jobList = null;
System.out.println("Leaving scheduler run method");
} } |
public class class_name {
    /**
     * Lazily loads the generated parser tables: reflectively loads the class
     * named by {@code name}, invokes its static no-arg {@code load()} method,
     * and stores the returned parser and oag tables. A no-op when already
     * loaded.
     *
     * @throws IllegalStateException if the class, its load method, or the
     *         invocation fails (the underlying exception is kept as cause)
     */
    public void load() {
        if (isLoaded()) {
            return;
        }
        final ClassLoader loader = Mapper.class.getClassLoader();
        final Class c;
        try {
            c = loader.loadClass(name);
        } catch (ClassNotFoundException e) {
            throw new IllegalStateException(name + ": cannot load class: " + e.getMessage(), e);
        }
        final Method m;
        try {
            m = c.getMethod("load", new Class[]{});
        } catch (NoSuchMethodException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
        final Object[] tables;
        try {
            // Static method: no receiver, no arguments.
            tables = (Object[]) m.invoke(null, new Object[] {});
        } catch (InvocationTargetException e) {
            throw new IllegalStateException(e.getTargetException().getMessage(), e);
        } catch (IllegalAccessException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
        parser = (Parser) tables[0];
        oag = (Oag) tables[1];
} } | public class class_name {
public void load() {
ClassLoader loader;
Class c;
Method m;
Object[] tables;
if (isLoaded()) {
return; // depends on control dependency: [if], data = [none]
}
loader = Mapper.class.getClassLoader();
try {
c = loader.loadClass(name); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
throw new IllegalStateException(name + ": cannot load class: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
try {
m = c.getMethod("load", new Class[]{}); // depends on control dependency: [try], data = [none]
} catch (NoSuchMethodException e) {
throw new IllegalStateException(e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
try {
tables = (Object[]) m.invoke(null, new Object[] {}); // depends on control dependency: [try], data = [none]
} catch (InvocationTargetException e) {
throw new IllegalStateException(e.getTargetException().getMessage(), e);
} catch (IllegalAccessException e) { // depends on control dependency: [catch], data = [none]
throw new IllegalStateException(e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
parser = (Parser) tables[0];
oag = (Oag) tables[1];
} } |
public class class_name {
    /**
     * Estimates a distance quantile by sampling random pairs of objects from
     * the relation and keeping a bounded heap of sampled distances.
     *
     * @param database database context (part of the runner contract)
     * @param rel      relation to sample pairs from
     * @return a single-row result holding the estimated quantile distance
     */
    public CollectionResult<double[]> run(Database database, Relation<O> rel) {
        DistanceQuery<O> dq = rel.getDistanceQuery(getDistanceFunction());
        int size = rel.size();
        // Roughly the number of unordered pairs: size^2 / 2 (via shift).
        long pairs = (size * (long) size) >> 1;
        // sampling <= 1 is a fraction of all pairs; larger values are an absolute count.
        final long ssize = sampling <= 1 ? (long) Math.ceil(sampling * pairs) : (long) sampling;
        if(ssize > Integer.MAX_VALUE) {
            throw new AbortException("Sampling size too large.");
        }
        final int qsize = quantile <= 0 ? 1 : (int) Math.ceil(quantile * ssize);
        DoubleMaxHeap heap = new DoubleMaxHeap(qsize);
        ArrayDBIDs ids = DBIDUtil.ensureArray(rel.getDBIDs());
        DBIDArrayIter i1 = ids.iter(), i2 = ids.iter();
        Random r = rand.getSingleThreadedRandom();
        FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Sampling", (int) ssize, LOG) : null;
        for(long i = 0; i < ssize; i++) {
            // Random ordered pair with x > y, so self-pairs are never drawn.
            // NOTE(review): requires size >= 2, else nextInt(0) throws — confirm callers.
            int x = r.nextInt(size - 1) + 1, y = r.nextInt(x);
            double dist = dq.distance(i1.seek(x), i2.seek(y));
            // Skip NaN, and/or zeros.  (dist != dist is the NaN test.)
            if(dist != dist || (nozeros && dist < Double.MIN_NORMAL)) {
                continue;
            }
            // Bounded insert: heap size is capped at qsize; heap.peek() below is
            // used as the quantile estimate — presumably the qsize-th smallest
            // sampled distance (see DoubleMaxHeap.add semantics; verify).
            heap.add(dist, qsize);
            LOG.incrementProcessed(prog);
        }
        LOG.statistics(new DoubleStatistic(PREFIX + ".quantile", quantile));
        LOG.statistics(new LongStatistic(PREFIX + ".samplesize", ssize));
        LOG.statistics(new DoubleStatistic(PREFIX + ".distance", heap.peek()));
        LOG.ensureCompleted(prog);
        Collection<String> header = Arrays.asList(new String[] { "Distance" });
        Collection<double[]> data = Arrays.asList(new double[][] { new double[] { heap.peek() } });
        return new CollectionResult<double[]>("Distances sample", "distance-sample", data, header);
} } | public class class_name {
public CollectionResult<double[]> run(Database database, Relation<O> rel) {
DistanceQuery<O> dq = rel.getDistanceQuery(getDistanceFunction());
int size = rel.size();
long pairs = (size * (long) size) >> 1;
final long ssize = sampling <= 1 ? (long) Math.ceil(sampling * pairs) : (long) sampling;
if(ssize > Integer.MAX_VALUE) {
throw new AbortException("Sampling size too large.");
}
final int qsize = quantile <= 0 ? 1 : (int) Math.ceil(quantile * ssize);
DoubleMaxHeap heap = new DoubleMaxHeap(qsize);
ArrayDBIDs ids = DBIDUtil.ensureArray(rel.getDBIDs());
DBIDArrayIter i1 = ids.iter(), i2 = ids.iter();
Random r = rand.getSingleThreadedRandom();
FiniteProgress prog = LOG.isVerbose() ? new FiniteProgress("Sampling", (int) ssize, LOG) : null;
for(long i = 0; i < ssize; i++) {
int x = r.nextInt(size - 1) + 1, y = r.nextInt(x);
double dist = dq.distance(i1.seek(x), i2.seek(y));
// Skip NaN, and/or zeros.
if(dist != dist || (nozeros && dist < Double.MIN_NORMAL)) {
continue;
}
heap.add(dist, qsize); // depends on control dependency: [for], data = [none]
LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none]
}
LOG.statistics(new DoubleStatistic(PREFIX + ".quantile", quantile));
LOG.statistics(new LongStatistic(PREFIX + ".samplesize", ssize));
LOG.statistics(new DoubleStatistic(PREFIX + ".distance", heap.peek()));
LOG.ensureCompleted(prog);
Collection<String> header = Arrays.asList(new String[] { "Distance" });
Collection<double[]> data = Arrays.asList(new double[][] { new double[] { heap.peek() } });
return new CollectionResult<double[]>("Distances sample", "distance-sample", data, header);
} } |
public class class_name {
    /**
     * Builds a fixed-size thread pool for bytecode processing. The size spec
     * is either an absolute count ("4" = 4 threads) or a multiple of the
     * available processors ("2C" = 2 x cores). Worker threads are named
     * "javassist-processor-N" and inherit the caller's context class loader.
     *
     * @param threads thread-count specification, optionally suffixed with "C"
     * @return a fixed thread pool of the requested size
     */
    static ExecutorService createExecutorService(final String threads) {
        final int threadCount = threads.endsWith("C")
                ? Integer.parseInt(threads.substring(0, threads.length() - 1))
                        * Runtime.getRuntime().availableProcessors()
                : Integer.parseInt(threads);
        final ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        final AtomicInteger nextIndex = new AtomicInteger(1);
        return Executors.newFixedThreadPool(
                threadCount,
                runnable -> {
                    final Thread worker = new Thread(runnable);
                    worker.setName("javassist-processor-" + nextIndex.getAndIncrement());
                    worker.setContextClassLoader(contextClassLoader);
                    return worker;
                });
    }
}
static ExecutorService createExecutorService(final String threads) {
final int threadCount;
if (threads.endsWith("C")) {
threadCount = Integer.parseInt(threads.substring(0, threads.length() - 1))
* Runtime.getRuntime().availableProcessors(); // depends on control dependency: [if], data = [none]
} else {
threadCount = Integer.parseInt(threads); // depends on control dependency: [if], data = [none]
}
final ClassLoader currentThreadContextClassloader = Thread.currentThread().getContextClassLoader();
final AtomicInteger threadIndex = new AtomicInteger(1);
return Executors.newFixedThreadPool(
threadCount,
r -> {
final Thread thread = new Thread(r);
thread.setName("javassist-processor-" + threadIndex.getAndIncrement());
thread.setContextClassLoader(currentThreadContextClassloader);
return thread;
});
} } |
public class class_name {
    /**
     * Returns the effective precision as a pooled Long, treating an undefined
     * precision as unlimited (Long.MAX_VALUE).
     */
    Long getPrecisionAct() {
        final Integer precision = getPrecision();
        final long actual = (precision == null) ? Long.MAX_VALUE : precision.longValue();
        return ValuePool.getLong(actual);
} } | public class class_name {
Long getPrecisionAct() {
Integer temp = getPrecision();
if (temp == null) {
return ValuePool.getLong(Long.MAX_VALUE); // depends on control dependency: [if], data = [none]
} else {
return ValuePool.getLong(temp.longValue()); // depends on control dependency: [if], data = [(temp]
}
} } |
public class class_name {
    /**
     * Binary-searches the in-memory record buffers for a record matching the
     * current key, honoring a seek comparison sign.
     *
     * @param strSeekSign comparison operator: "==", ">", ">=", "<" or "<=".
     *        NOTE(review): a null sign is tolerated by the "==" branch below,
     *        but strSeekSign is dereferenced earlier (the ">" test on an exact
     *        compare) and again in the non-exact fallback, so passing null can
     *        throw NullPointerException — confirm callers never pass null.
     * @param vectorTable table whose current record receives each candidate buffer
     * @param keyArea     key area used for key comparisons
     * @return the matching buffer (with m_iIndex left at its position), or
     *         null when no record satisfies the seek
     * @throws DBException on buffer/field conversion errors
     */
    public BaseBuffer doSeek(String strSeekSign, FieldTable vectorTable, KeyAreaInfo keyArea) throws DBException
    {
        BaseBuffer buffer = null;
        int iLowestMatch = -1; // For non-exact matches
        keyArea.setupKeyBuffer(null, Constants.TEMP_KEY_AREA);
        if (keyArea.getUniqueKeyCode() != Constants.UNIQUE) // The main key is part of the comparison
            vectorTable.getRecord().getKeyArea(Constants.MAIN_KEY_AREA).setupKeyBuffer(null, Constants.TEMP_KEY_AREA);
        int iLowKey = 0;
        int iHighKey = this.getRecordCount() - 1;
        while (iLowKey <= iHighKey)
        {
            // Classic binary-search midpoint; fine here since indices are small
            // vector positions (the (lo+hi)/2 overflow concern needs ~2^30 records).
            m_iIndex = (iLowKey + iHighKey) / 2;
            buffer = (BaseBuffer)m_VectorObjects.elementAt(m_iIndex);
            buffer.resetPosition(); // Being careful
            buffer.bufferToFields(vectorTable.getRecord(), Constants.DONT_DISPLAY, Constants.READ_MOVE);
            int iCompare = this.compareKeys(Constants.TEMP_KEY_AREA, strSeekSign, vectorTable, keyArea);
            if (iCompare < 0)
                iLowKey = m_iIndex + 1; // target key is smaller than this key
            else if (iCompare > 0)
                iHighKey = m_iIndex - 1; // target key is larger than this key
            else // if (iCompare == 0)
            {
                // Keys compare equal; what happens next depends on the seek sign.
                if (strSeekSign.equals(">"))
                {
                    iLowKey = m_iIndex + 1; // target key is larger than this key
                    iLowestMatch = iLowKey; // Not looking for an exact match (lowest so far)
                }
                else if (strSeekSign.equals("<"))
                {
                    iHighKey = m_iIndex - 1; // target key is larger than this key
                    iLowestMatch = iHighKey; // Not looking for an exact match (lowest so far)
                }
                else
                {
                    if (keyArea.getUniqueKeyCode() == Constants.UNIQUE)
                    {
                        return buffer; // Found a matching record
                    }
                    else
                    {
                        if ((strSeekSign == null) || (strSeekSign.equals("==")))
                        { // Found an exact matching record
                            return buffer;
                        }
                        else
                        { // Not looking for an exact match
                            // Keep searching left for the first of several equal keys.
                            iLowestMatch = m_iIndex; // lowest so far.
                            iHighKey = m_iIndex - 1; // target key is larger than this key
                        }
                    }
                }
            }
        }
        m_iIndex = iHighKey; // Point to next lower key
        if (keyArea.getKeyOrder(Constants.MAIN_KEY_FIELD) == Constants.DESCENDING) // Rarely
            m_iIndex = iLowKey; // Point to next higher key
        if (iLowestMatch == -1)
        { // For non-exact searches, return the next/previous
            if ((strSeekSign.equals(">=")) || (strSeekSign.equals(">")))
                iLowestMatch = m_iIndex + 1;
            if ((strSeekSign.equals("<=")) || (strSeekSign.equals("<")))
                iLowestMatch = m_iIndex;
        }
        // Double "if" acts as a short-circuit &&: only a valid candidate index
        // is materialized below.
        if (iLowestMatch != -1) if (iLowestMatch < m_VectorObjects.size())
        { // There was a non-exact match to this seek
            m_iIndex = iLowestMatch;
            buffer = (BaseBuffer)m_VectorObjects.elementAt(m_iIndex);
            buffer.bufferToFields(vectorTable.getRecord(), Constants.DONT_DISPLAY, Constants.READ_MOVE);
            return buffer; // Match (for a non-exact search)
        }
        return null; // No match
} } | public class class_name {
/**
 * Binary-searches the sorted in-memory buffer vector for a record whose key matches
 * the key staged in the temporary key area, honoring a relational seek sign.
 * The matched buffer's contents are copied into {@code vectorTable}'s current record.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 *
 * @param strSeekSign one of "==", ">=", ">", "<=", "<", or null (treated as "==").
 * @param vectorTable table whose current record receives the matched buffer's fields.
 * @param keyArea     key area used to build and compare the probe key.
 * @return the matching buffer (exact or relational), or null when no record qualifies.
 * @throws DBException propagated from buffer/field conversion.
 */
public BaseBuffer doSeek(String strSeekSign, FieldTable vectorTable, KeyAreaInfo keyArea) throws DBException
{
    BaseBuffer buffer = null;
    int iLowestMatch = -1;     // For non-exact matches
    keyArea.setupKeyBuffer(null, Constants.TEMP_KEY_AREA);
    if (keyArea.getUniqueKeyCode() != Constants.UNIQUE)    // The main key is part of the comparison
        vectorTable.getRecord().getKeyArea(Constants.MAIN_KEY_AREA).setupKeyBuffer(null, Constants.TEMP_KEY_AREA);
    int iLowKey = 0;
    int iHighKey = this.getRecordCount() - 1;
    // classic inclusive binary search over the buffer vector
    while (iLowKey <= iHighKey)
    {
        m_iIndex = (iLowKey + iHighKey) / 2;
        buffer = (BaseBuffer)m_VectorObjects.elementAt(m_iIndex);
        buffer.resetPosition();    // Being careful
        buffer.bufferToFields(vectorTable.getRecord(), Constants.DONT_DISPLAY, Constants.READ_MOVE);
        int iCompare = this.compareKeys(Constants.TEMP_KEY_AREA, strSeekSign, vectorTable, keyArea);
        if (iCompare < 0)
            iLowKey = m_iIndex + 1;    // target key is smaller than this key
        else if (iCompare > 0)
            iHighKey = m_iIndex - 1;   // target key is larger than this key
        else    // if (iCompare == 0)
        {
            if (strSeekSign.equals(">"))
            {
                iLowKey = m_iIndex + 1;    // target key is larger than this key // depends on control dependency: [if], data = [none]
                iLowestMatch = iLowKey;    // Not looking for an exact match (lowest so far) // depends on control dependency: [if], data = [none]
            }
            else if (strSeekSign.equals("<"))
            {
                iHighKey = m_iIndex - 1;   // target key is larger than this key // depends on control dependency: [if], data = [none]
                iLowestMatch = iHighKey;   // Not looking for an exact match (lowest so far) // depends on control dependency: [if], data = [none]
            }
            else
            {
                if (keyArea.getUniqueKeyCode() == Constants.UNIQUE)
                {
                    return buffer; // Found a matching record // depends on control dependency: [if], data = [none]
                }
                else
                {
                    if ((strSeekSign == null) || (strSeekSign.equals("==")))
                    { // Found an exact matching record
                        return buffer; // depends on control dependency: [if], data = [none]
                    }
                    else
                    { // Not looking for an exact match
                        iLowestMatch = m_iIndex;   // lowest so far. // depends on control dependency: [if], data = [none]
                        iHighKey = m_iIndex - 1;   // target key is larger than this key // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
    }
    m_iIndex = iHighKey;   // Point to next lower key
    if (keyArea.getKeyOrder(Constants.MAIN_KEY_FIELD) == Constants.DESCENDING)    // Rarely
        m_iIndex = iLowKey;    // Point to next higher key
    // NOTE(review): strSeekSign is null-checked in the exact-match branch above, yet the
    // equals() calls below would NPE on a null sign — confirm callers always pass an
    // explicit sign whenever no exact match can exist.
    if (iLowestMatch == -1)
    {   // For non-exact searches, return the next/previous
        if ((strSeekSign.equals(">=")) || (strSeekSign.equals(">")))
            iLowestMatch = m_iIndex + 1;
        if ((strSeekSign.equals("<=")) || (strSeekSign.equals("<")))
            iLowestMatch = m_iIndex;
    }
    if (iLowestMatch != -1) if (iLowestMatch < m_VectorObjects.size())
    {   // There was a non-exact match to this seek
        m_iIndex = iLowestMatch;
        buffer = (BaseBuffer)m_VectorObjects.elementAt(m_iIndex);
        buffer.bufferToFields(vectorTable.getRecord(), Constants.DONT_DISPLAY, Constants.READ_MOVE);
        return buffer; // Match (for a non-exact search)
    }
    return null;   // No match
} } |
public class class_name {
/**
 * Resolves the template name for a result: an explicit template on the result wins
 * (with the known suffix stripped); otherwise a template named after the controller
 * is preferred when it exists on disk, falling back to controller-path/action-name.
 *
 * @param route    the matched route, may be null.
 * @param response the result possibly carrying an explicit template.
 * @return the template name, or null when neither a template nor a route is available.
 */
public String getTemplateNameForResult(Route route, Result response) {
    final String template = response.template();
    if (template != null) {
        // An explicit template was set on the result; strip the suffix if present.
        if (template.endsWith(templateSuffix)) {
            return template.substring(0, template.length() - templateSuffixLengthToRemove);
        }
        return template;
    }
    if (route == null) {
        return null;
    }
    final String controllerPath = RouterHelper.getReverseRouteFast(route.getController());
    // Look for a controller named template first
    if (new File(realPath + controllerPath + templateSuffix).exists()) {
        return controllerPath;
    }
    //TODO Route(r).reverseRoute
    return controllerPath + "/" + route.getActionName();
} } | public class class_name {
/**
 * Resolves the template name for a result: an explicit template wins (suffix
 * stripped if present); otherwise falls back to a controller-named template on
 * disk, then controller-path/action-name. Returns null when neither a template
 * nor a route is available.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public String getTemplateNameForResult(Route route, Result response) {
        String template = response.template();
        if (template == null) {
            if (route != null) {
                String controllerPath = RouterHelper.getReverseRouteFast(route.getController());
                // Look for a controller named template first
                if (new File(realPath + controllerPath + templateSuffix).exists()) {
                    return controllerPath; // depends on control dependency: [if], data = [none]
                }
                //TODO Route(r).reverseRoute
                return controllerPath + "/" + route.getActionName(); // depends on control dependency: [if], data = [none]
            } else {
                return null; // depends on control dependency: [if], data = [none]
            }
        } else {
            if (template.endsWith(templateSuffix)) {
                return template.substring(0, template.length() - templateSuffixLengthToRemove); // depends on control dependency: [if], data = [none]
            }
            return template; // depends on control dependency: [if], data = [none]
        }
} } |
public class class_name {
/**
 * Walks up the expression tree from {@code leaf} to find the root of a chained
 * member-feature-call sequence (calls linked through their receivers), stopping
 * at {@code container} (when given), at the first non-member-call ancestor, or
 * when the sequence turns out to be an argument of an enclosing call.
 *
 * @param leaf      expression at the bottom of the chain.
 * @param container optional ancestor at which to stop the walk; may be null.
 * @param feedback  optional callback invoked for each receiver-linked call; may be null.
 * @return the root member feature call, or null when {@code leaf} is not inside one.
 */
protected static XMemberFeatureCall getRootOfMemberFeatureCallSequence(EObject leaf, EObject container,
        Procedure1<XMemberFeatureCall> feedback) {
    EObject current = leaf;
    EObject candidate = EcoreUtil2.getContainerOfType(leaf.eContainer(), XExpression.class);
    while (candidate != null && (container == null || candidate != container)) {
        if (!(candidate instanceof XMemberFeatureCall)) {
            break;
        }
        final XMemberFeatureCall memberCall = (XMemberFeatureCall) candidate;
        final EObject child = current;
        current = memberCall;
        if (memberCall.getActualReceiver() == child) {
            // Sequence of calls, with the '.' char — keep climbing.
            if (feedback != null) {
                feedback.apply(memberCall);
            }
            candidate = EcoreUtil2.getContainerOfType(current.eContainer(), XExpression.class);
        } else if (memberCall.getActualArguments().contains(child)) {
            // The sequence is an argument of an enclosing call: the child was the root.
            current = child;
            break;
        } else {
            break;
        }
    }
    if (current instanceof XMemberFeatureCall) {
        return (XMemberFeatureCall) current;
    }
    return null;
} } | public class class_name {
/**
 * Walks up the expression tree from {@code leaf} to find the root of a chained
 * member-feature-call sequence (calls linked through their receivers), stopping
 * at {@code container} (when given), at the first non-member-call ancestor, or
 * when the sequence is an argument of an enclosing call. Returns null when
 * {@code leaf} is not inside a member feature call.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
protected static XMemberFeatureCall getRootOfMemberFeatureCallSequence(EObject leaf, EObject container,
        Procedure1<XMemberFeatureCall> feedback) {
    EObject call = leaf;
    EObject obj = EcoreUtil2.getContainerOfType(leaf.eContainer(), XExpression.class);
    while (obj != null && (container == null || obj != container)) {
        if (!(obj instanceof XMemberFeatureCall)) {
            obj = null; // depends on control dependency: [if], data = [none]
        } else {
            final EObject previous = call;
            final XMemberFeatureCall fcall = (XMemberFeatureCall) obj;
            call = fcall; // depends on control dependency: [if], data = [none]
            if (fcall.getActualReceiver() == previous) {
                // Sequence of calls, with the '.' char.
                if (feedback != null) {
                    feedback.apply(fcall); // depends on control dependency: [if], data = [none]
                }
                obj = EcoreUtil2.getContainerOfType(call.eContainer(), XExpression.class); // depends on control dependency: [if], data = [none]
            } else if (fcall.getActualArguments().contains(previous)) {
                // The sequence is an argument of a function call.
                call = previous; // depends on control dependency: [if], data = [none]
                obj = null; // depends on control dependency: [if], data = [none]
            } else {
                obj = null; // depends on control dependency: [if], data = [none]
            }
        }
    }
    return call instanceof XMemberFeatureCall ? (XMemberFeatureCall) call : null;
} } |
public class class_name {
/**
 * Deletes every entry inside {@code directory} (recursively via
 * {@code deleteRecursive}); the directory itself is kept. A non-directory
 * argument is a no-op that reports success.
 *
 * @param directory the directory to empty.
 * @return true when every entry was removed (or there was nothing to do),
 *         false when listing failed or any deletion failed.
 */
public static boolean clearDirectory(File directory) {
    if (directory.isDirectory()) {
        File[] entries = directory.listFiles();
        if (entries == null) {
            // BUG FIX: listFiles() returns null on an I/O error or when the
            // directory disappears/becomes unreadable concurrently; the old
            // code would NPE here instead of reporting failure.
            return false;
        }
        for (File entry : entries) {
            if (!deleteRecursive(entry)) {
                return false;
            }
        }
    }
    return true;
} } | public class class_name {
/**
 * Deletes every entry inside {@code directory} (recursively via
 * {@code deleteRecursive}); the directory itself is kept. A non-directory
 * argument is a no-op that reports success.
 * NOTE(review): File.listFiles() may return null (I/O error / concurrent
 * removal), which would NPE in the for-each below — confirm callers guard this.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public static boolean clearDirectory(File directory) {
    if (directory.isDirectory()) {
        for (File file : directory.listFiles()) {
            if (!deleteRecursive(file)) {
                return false; // depends on control dependency: [if], data = [none]
            }
        }
    }
    return true;
} } |
public class class_name {
/**
 * Linearly interpolates a value at {@code x} between the pair of consecutive
 * points whose x-range contains {@code x}, optionally transforms it through the
 * hedge, and appends the result to {@code exploded}. When {@code x} falls in no
 * segment the method is a silent no-op.
 *
 * @param x the abscissa at which to evaluate the piecewise-linear curve.
 */
void calculateValueAt(double x) {
    final int count = points.size();
    for (int idx = 1; idx < count; idx++) {
        final Point left = points.get(idx - 1);
        final Point right = points.get(idx);
        final double x0 = left.getX().doubleValue();
        final double x1 = right.getX().doubleValue();
        if (x < x0 || x > x1) {
            continue;
        }
        // slope-intercept form through the two segment endpoints
        final double y0 = left.getY().doubleValue();
        final double y1 = right.getY().doubleValue();
        final double slope = (y1 - y0) / (x1 - x0);
        final double intercept = y0 - slope * x0;
        double value = slope * x + intercept;
        // apply the hedge if we have one
        if (haveHedge()) {
            value = getHedge().calculateValue(value);
        }
        exploded.add(value);
        return;
    }
} } | public class class_name {
/**
 * Linearly interpolates a value at {@code x} between the pair of consecutive
 * points whose x-range contains {@code x}, optionally transforms it through the
 * hedge, and appends the result to {@code exploded}; silently does nothing when
 * {@code x} falls in no segment.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
void calculateValueAt(double x) {
    int len = points.size();
    for (int i = 1; i < len; i++) {
        Point p1 = points.get(i - 1);
        Point p2 = points.get(i);
        if (x >= p1.getX().doubleValue() && x <= p2.getX().doubleValue()) {
            // calculate the slope intercept form
            double m = (p2.getY().doubleValue() - p1.getY().doubleValue()) / (p2.getX().doubleValue() - p1.getX().doubleValue());
            double b = p1.getY().doubleValue() - m * p1.getX().doubleValue();
            double interpolated = m * x + b;
            // apply the hedge if we have one
            if(haveHedge()) {
                interpolated = getHedge().calculateValue(interpolated); // depends on control dependency: [if], data = [none]
            }
            exploded.add(interpolated); // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
    }
} } |
public class class_name {
/**
 * Hands out a {@link Mac} for {@code algorithm}: a pooled instance is reset and
 * reused when available, otherwise a fresh one is created.
 *
 * @param algorithm the MAC algorithm name.
 * @return a ready-to-use Mac instance.
 * @throws IllegalArgumentException when the algorithm is unknown.
 */
public Mac acquire(String algorithm) {
    Queue<Mac> pool = this.macPoolByAlgorithm.get(algorithm);
    Mac pooled = (pool == null) ? null : pool.poll();
    if (pooled == null) {
        // pool miss — create a brand new instance
        try {
            return Mac.getInstance(algorithm);
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalArgumentException(String.format("NoSuchAlgorithm '%s':", algorithm), e);
        }
    }
    // it has to be synchronized, for the case that some memory parts of Mac
    // provider were changed by another thread but are not yet visible here
    synchronized (pooled) {
        pooled.reset();
    }
    return pooled;
} } | public class class_name {
/**
 * Hands out a {@link Mac} for {@code algorithm}: a pooled instance is reset
 * (under synchronization, for cross-thread visibility) and reused when
 * available, otherwise a fresh one is created.
 * @throws IllegalArgumentException when the algorithm is unknown.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public Mac acquire(String algorithm) {
    Mac result = null;
    Queue<Mac> pool = this.macPoolByAlgorithm.get(algorithm);
    if (pool != null) {
        result = pool.poll(); // depends on control dependency: [if], data = [none]
    }
    if (result != null) {
        // it has to be synchronized, for the case that some memory parts of Mac provider are changed,
        // but not yet visible for this thread
        synchronized (result) { // depends on control dependency: [if], data = [(result]
            result.reset();
        }
        return result; // depends on control dependency: [if], data = [none]
    }
    try {
        return Mac.getInstance(algorithm); // depends on control dependency: [try], data = [none]
    } catch (NoSuchAlgorithmException e) {
        throw new IllegalArgumentException(String.format("NoSuchAlgorithm '%s':", algorithm), e);
    } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * POSTs {@code stringToSend} as text/xml to {@code urlString}, optionally
 * through an HTTP proxy and with Basic authentication, then reads the response
 * body (the error stream for status codes above 400) line by line, joining
 * lines with '\r'.
 *
 * @param urlString   target URL.
 * @param stringToSend request body, sent UTF-8 encoded.
 * @param user/pwd     optional Basic-auth credentials (both must be non-empty).
 * @param proxyHost/proxyPort optional HTTP proxy (used when proxyHost is non-empty).
 * @param httpHeaders  extra request headers, applied verbatim.
 * @return response code and body wrapped in an HttpResponse.
 * @throws IOException on any network or encoding failure.
 */
public HttpResponse doHttpCall(final String urlString,
        final String stringToSend, final String user, final String pwd,
        final String proxyHost, final int proxyPort,
        final Map<String, String> httpHeaders) throws IOException {
    HttpURLConnection connection = null;
    HttpResponse result = new HttpResponse();
    try {
        URL url = new URL(urlString);
        if (!StringUtils.isEmpty(proxyHost)) {
            Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(
                    proxyHost, proxyPort));
            connection = (HttpURLConnection) url.openConnection(proxy);
        } else {
            connection = (HttpURLConnection) url.openConnection();
        }
        // BUG FIX: encode the payload once up front. Content-Length must be the
        // BYTE length, not the char count — they differ for non-ASCII payloads.
        final byte[] payload = stringToSend.getBytes("UTF-8");
        connection.setRequestProperty("Content-Length",
                String.valueOf(payload.length));
        connection.setRequestProperty("Content-Type", "text/xml");
        connection.setRequestProperty("Connection", "Close");
        connection.setRequestProperty("SoapAction", "");
        connection.setDoOutput(true);
        connection.setUseCaches(false);
        for (Entry<String, String> header : httpHeaders.entrySet()) {
            connection.setRequestProperty(header.getKey(), header.getValue());
        }
        if (!StringUtils.isEmpty(user) && !StringUtils.isEmpty(pwd)) {
            String auth = (user + ":" + pwd);
            String encoding = new Base64Encoder().encode(auth
                    .getBytes("UTF-8"));
            connection.setRequestProperty("Authorization", "Basic "
                    + encoding);
        }
        // Send request. BUG FIX: write the UTF-8 bytes directly — the previous
        // writeBytes(String) truncated every char to its low byte, corrupting
        // non-ASCII content. Also close the stream even when writing fails.
        DataOutputStream wr = new DataOutputStream(
                connection.getOutputStream());
        try {
            wr.write(payload);
            wr.flush();
        } finally {
            wr.close();
        }
        // Get Response
        LOG.info("response code = " + connection.getResponseCode());
        InputStream is;
        result.setResponseCode(connection.getResponseCode());
        if (connection.getResponseCode() <= HTTP_400) {
            is = connection.getInputStream();
        } else {
            /* error from server */
            is = connection.getErrorStream();
        }
        BufferedReader rd = new BufferedReader(new InputStreamReader(is,
                "UTF-8"));
        StringBuilder response = new StringBuilder();
        try {
            String line;
            while ((line = rd.readLine()) != null) {
                response.append(line);
                response.append('\r');
            }
        } finally {
            // close the reader even when reading fails mid-stream
            rd.close();
        }
        LOG.info("response " + response.toString());
        result.setResponseContent(response.toString());
    } finally {
        if (connection != null) {
            connection.disconnect();
        }
    }
    return result;
} } | public class class_name {
/**
 * POSTs {@code stringToSend} as text/xml to {@code urlString}, optionally
 * through an HTTP proxy and with Basic authentication, then reads the response
 * body (the error stream for status codes above 400) line by line, joining
 * lines with '\r'. Returns code and body wrapped in an HttpResponse.
 * NOTE(review): Content-Length is set from the char count while writeBytes()
 * emits one byte per char — confirm payloads are ASCII-only.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public HttpResponse doHttpCall(final String urlString,
        final String stringToSend, final String user, final String pwd,
        final String proxyHost, final int proxyPort,
        final Map<String, String> httpHeaders) throws IOException {
    HttpURLConnection connection = null;
    HttpResponse result = new HttpResponse();
    try {
        URL url = new URL(urlString);
        if (!StringUtils.isEmpty(proxyHost)) {
            Proxy proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(
                    proxyHost, proxyPort));
            connection = (HttpURLConnection) url.openConnection(proxy); // depends on control dependency: [if], data = [none]
        } else {
            connection = (HttpURLConnection) url.openConnection(); // depends on control dependency: [if], data = [none]
        }
        connection.setRequestProperty("Content-Length",
                String.valueOf(stringToSend.length()));
        connection.setRequestProperty("Content-Type", "text/xml");
        connection.setRequestProperty("Connection", "Close");
        connection.setRequestProperty("SoapAction", "");
        connection.setDoOutput(true);
        connection.setUseCaches(false);
        Set<Entry<String, String>> entrySet = httpHeaders.entrySet();
        for (Entry<String, String> key : entrySet) {
            connection.setRequestProperty(key.getKey(), key.getValue()); // depends on control dependency: [for], data = [key]
        }
        if (!StringUtils.isEmpty(user) && !StringUtils.isEmpty(pwd)) {
            String auth = (user + ":" + pwd);
            String encoding = new Base64Encoder().encode(auth
                    .getBytes("UTF-8"));
            connection.setRequestProperty("Authorization", "Basic "
                    + encoding); // depends on control dependency: [if], data = [none]
        }
        // Send request
        DataOutputStream wr = new DataOutputStream(
                connection.getOutputStream());
        wr.writeBytes(stringToSend);
        wr.flush();
        wr.close();
        // Get Response
        LOG.info("response code = " + connection.getResponseCode());
        InputStream is;
        result.setResponseCode(connection.getResponseCode());
        if (connection.getResponseCode() <= HTTP_400) {
            is = connection.getInputStream(); // depends on control dependency: [if], data = [none]
        } else {
            /* error from server */
            is = connection.getErrorStream(); // depends on control dependency: [if], data = [none]
        }
        BufferedReader rd = new BufferedReader(new InputStreamReader(is,
                "UTF-8"));
        String line;
        StringBuilder response = new StringBuilder();
        while ((line = rd.readLine()) != null) {
            response.append(line); // depends on control dependency: [while], data = [none]
            response.append('\r'); // depends on control dependency: [while], data = [none]
        }
        rd.close();
        LOG.info("response " + response.toString());
        result.setResponseContent(response.toString());
    } finally {
        if (connection != null) {
            connection.disconnect(); // depends on control dependency: [if], data = [none]
        }
    }
    return result;
} } |
public class class_name {
/**
 * Returns true when {@code file} has a ".zip" name and the archive contains,
 * for every suffix in {@code fileTypes}, at least one entry whose name ends
 * with that suffix. Returns false for non-zip names and on any I/O error.
 *
 * @param file      candidate archive; only names ending in ".zip" are opened.
 * @param fileTypes entry-name suffixes that must all be present.
 * @return true when every suffix is matched by some entry.
 */
public static boolean doesZipContainFileTypes(File file, String... fileTypes) {
    if (!file.getName().endsWith(".zip")) {
        return false;
    }
    ZipFile zip = null;
    try {
        zip = new ZipFile(file);
        boolean[] foundFileTypes = new boolean[fileTypes.length];
        boolean foundAll = false;
        Enumeration<? extends ZipEntry> entries = zip.entries();
        // Scan entries until every requested suffix has been seen.
        while (entries.hasMoreElements() && !foundAll) {
            String name = entries.nextElement().getName();
            foundAll = true;
            for (int i = 0; i < fileTypes.length; i++) {
                if (!foundFileTypes[i]) {
                    foundFileTypes[i] = name.endsWith(fileTypes[i]);
                }
                foundAll &= foundFileTypes[i];
            }
        }
        return foundAll;
    } catch (IOException e) {
        return false;
    } finally {
        if (zip != null) {
            try {
                zip.close();
            } catch (IOException e) {
                // BUG FIX: the old "return false" here overrode the computed
                // result and silently swallowed any in-flight exception; a
                // close failure does not change whether the entries were found.
            }
        }
    }
} } | public class class_name {
/**
 * Returns true when {@code file} has a ".zip" name and the archive contains,
 * for every suffix in {@code fileTypes}, at least one entry whose name ends
 * with that suffix; false for non-zip names and on I/O errors.
 * NOTE(review): the "return false" inside the finally block overrides the
 * computed result and swallows in-flight exceptions when close() fails.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public static boolean doesZipContainFileTypes(File file, String... fileTypes) {
    // It must be a zip file with an XML file at it's root and a properties file in it
    if (!file.getName().endsWith(".zip")) {
        return false; // depends on control dependency: [if], data = [none]
    }
    ZipFile zip = null;
    try {
        zip = new ZipFile(file); // depends on control dependency: [try], data = [none]
        boolean[] foundFileTypes = new boolean[fileTypes.length];
        boolean foundAll = false;
        Enumeration<? extends ZipEntry> entries = zip.entries(); // depends on control dependency: [try], data = [none]
        while (entries.hasMoreElements() && !foundAll) {
            foundAll = true; // depends on control dependency: [while], data = [none]
            ZipEntry entry = entries.nextElement();
            String name = entry.getName();
            for (int i = 0; i < fileTypes.length; i++) {
                if (!foundFileTypes[i]) {
                    foundFileTypes[i] = name.endsWith(fileTypes[i]); // depends on control dependency: [if], data = [none]
                    if (foundAll) {
                        foundAll = foundFileTypes[i]; // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
        return foundAll; // depends on control dependency: [try], data = [none]
    } catch (IOException e) {
        return false;
    } finally { // depends on control dependency: [catch], data = [none]
        if (zip != null) {
            try {
                zip.close(); // depends on control dependency: [try], data = [none]
            } catch (IOException e) {
                return false;
            } // depends on control dependency: [catch], data = [none]
        }
    }
} } |
public class class_name {
/**
 * Gets (or lazily creates) the pooled Connection for the given address /
 * protocol / user / timeout, registers {@code call} on it, and ensures its I/O
 * streams are set up. On any setup failure the connection is marked closed and
 * closed so it cannot linger in the pool half-initialized.
 *
 * @param addr       remote server address.
 * @param protocol   RPC protocol class, part of the connection identity.
 * @param ticket     caller's UGI, part of the connection identity.
 * @param rpcTimeout timeout used in the connection identity.
 * @param call       the call to attach to the connection.
 * @return a connection with the call registered and streams established.
 * @throws IOException when the client is stopped or stream setup fails.
 */
private Connection getConnection(InetSocketAddress addr,
                                 Class<?> protocol,
                                 UserGroupInformation ticket,
                                 int rpcTimeout,
                                 Call call)
                                 throws IOException {
    if (!running.get()) {
      // the client is stopped
      throw new IOException("The client is stopped");
    }
    Connection connection;
    /* we could avoid this allocation for each RPC by having a
     * connectionsId object and with set() method. We need to manage the
     * refs for keys in HashMap properly. For now its ok.
     */
    ConnectionId remoteId = new ConnectionId(
        addr, protocol, ticket, rpcTimeout);
    // Loop: addCall can fail when a pooled connection is concurrently closing,
    // in which case we look the (possibly replaced) connection up again.
    do {
      synchronized (connections) {
        connection = connections.get(remoteId);
        if (connection == null) {
          connection = new Connection(remoteId);
          connections.put(remoteId, connection);
        }
      }
    } while (!connection.addCall(call));
    //we don't invoke the method below inside "synchronized (connections)"
    //block above. The reason for that is if the server happens to be slow,
    //it will take longer to establish a connection and that will slow the
    //entire system down.
    boolean success = false;
    try {
      connection.setupIOstreams();
      success = true;
    } finally {
      if (!success) {
        // We need to make sure connection is removed from connections object if
        // there is any unhandled exception (like OOM). In this case, unlikely
        // there is response thread started which can clear the connection
        // eventually.
        try {
          if (connection.closeException == null) {
            connection.markClosed(new IOException(
                "Unexpected error when setup IO Stream"));
          }
        } finally {
          connection.close(true);
        }
      }
    }
    return connection;
} } | public class class_name {
/**
 * Gets (or lazily creates) the pooled Connection for the given address /
 * protocol / user / timeout, registers {@code call} on it, and ensures its I/O
 * streams are set up; on setup failure the connection is marked closed and
 * closed. Throws IOException when the client is stopped.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
private Connection getConnection(InetSocketAddress addr,
                                 Class<?> protocol,
                                 UserGroupInformation ticket,
                                 int rpcTimeout,
                                 Call call)
                                 throws IOException {
    if (!running.get()) {
      // the client is stopped
      throw new IOException("The client is stopped");
    }
    Connection connection;
    /* we could avoid this allocation for each RPC by having a
     * connectionsId object and with set() method. We need to manage the
     * refs for keys in HashMap properly. For now its ok.
     */
    ConnectionId remoteId = new ConnectionId(
        addr, protocol, ticket, rpcTimeout);
    do {
      synchronized (connections) {
        connection = connections.get(remoteId);
        if (connection == null) {
          connection = new Connection(remoteId); // depends on control dependency: [if], data = [none]
          connections.put(remoteId, connection); // depends on control dependency: [if], data = [none]
        }
      }
    } while (!connection.addCall(call));
    //we don't invoke the method below inside "synchronized (connections)"
    //block above. The reason for that is if the server happens to be slow,
    //it will take longer to establish a connection and that will slow the
    //entire system down.
    boolean success = false;
    try {
      connection.setupIOstreams();
      success = true;
    } finally {
      if (!success) {
        // We need to make sure connection is removed from connections object if
        // there is any unhandled exception (like OOM). In this case, unlikely
        // there is response thread started which can clear the connection
        // eventually.
        try {
          if (connection.closeException == null) {
            connection.markClosed(new IOException(
                "Unexpected error when setup IO Stream")); // depends on control dependency: [if], data = [none]
          }
        } finally {
          connection.close(true);
        }
      }
    }
    return connection;
} } |
public class class_name {
/**
 * Registers {@code validator} on the input component this label points at;
 * silently does nothing when the label's target is not an {@link Input}.
 *
 * @param validator the validator to attach.
 */
public void addValidator(final FieldValidator validator) {
    final WComponent target = findComponentForLabel(field);
    if (!(target instanceof Input)) {
        return;
    }
    ((Input) target).addValidator(validator);
} } | public class class_name {
/**
 * Registers {@code validator} on the input component this label points at;
 * silently does nothing when the label's target is not an {@link Input}.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public void addValidator(final FieldValidator validator) {
    WComponent labelField = findComponentForLabel(field);
    if (labelField instanceof Input) {
        ((Input) labelField).addValidator(validator); // depends on control dependency: [if], data = [none]
    }
} } |
public class class_name {
/**
 * Removes the map entry whose value is {@code computer} from the node-to-computer
 * map and notifies the computer of its removal; runs under the queue lock.
 * A computer not present in the map is silently ignored.
 *
 * @param computer the computer to detach.
 */
void removeComputer(final Computer computer) {
    Queue.withLock(new Runnable() {
        @Override
        public void run() {
            final Map<Node, Computer> map = getComputerMap();
            for (final Map.Entry<Node, Computer> entry : map.entrySet()) {
                if (entry.getValue() != computer) {
                    continue;
                }
                map.remove(entry.getKey());
                computer.onRemoved();
                return;
            }
        }
    });
} } | public class class_name {
/**
 * Removes the map entry whose value is {@code computer} from the node-to-computer
 * map and notifies the computer of its removal; runs under the queue lock and
 * silently ignores computers not present in the map.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
void removeComputer(final Computer computer) {
    Queue.withLock(new Runnable() {
        @Override
        public void run() {
            Map<Node,Computer> computers = getComputerMap();
            for (Map.Entry<Node, Computer> e : computers.entrySet()) {
                if (e.getValue() == computer) {
                    computers.remove(e.getKey()); // depends on control dependency: [if], data = [none]
                    computer.onRemoved(); // depends on control dependency: [if], data = [none]
                    return; // depends on control dependency: [if], data = [none]
                }
            }
        }
    });
} } |
public class class_name {
/**
 * Closes the current bracketed sub-expression by moving up to the parent of
 * the current predicate expression.
 *
 * @return a concatenator positioned on the parent expression.
 * @throws RuntimeException when there is no enclosing open bracket.
 */
public Concatenator BR_CLOSE() {
    if (((PredicateExpression) this.astNode).getParent() == null) {
        throw new RuntimeException("No matching open bracket");
    }
    return new Concatenator(((PredicateExpression) this.astNode).getParent());
} } | public class class_name {
/**
 * Closes the current bracketed sub-expression by moving up to the parent of
 * the current predicate expression; throws RuntimeException when there is no
 * enclosing open bracket.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public Concatenator BR_CLOSE() {
    if (((PredicateExpression)this.astNode).getParent() != null) {
        Concatenator ret = new Concatenator(((PredicateExpression)this.astNode).getParent());
        return ret;
        // depends on control dependency: [if], data = [none]
    } else
        throw new RuntimeException("No matching open bracket");
} } |
public class class_name {
/**
 * Strips the wrapper from a wrapped processing-instruction processor;
 * anything else — including null — is returned unchanged.
 *
 * @param processor possibly wrapped processor, may be null.
 * @return the unwrapped processor, or the argument itself (null-safe).
 */
public static IProcessingInstructionProcessor unwrap(final IProcessingInstructionProcessor processor) {
    if (processor instanceof AbstractProcessorWrapper) {
        return (IProcessingInstructionProcessor) ((AbstractProcessorWrapper) processor).unwrap();
    }
    // covers both a null argument and an already-unwrapped processor
    return processor;
} } | public class class_name {
/**
 * Strips the wrapper from a wrapped processing-instruction processor;
 * anything else — including null — is returned unchanged.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
public static IProcessingInstructionProcessor unwrap(final IProcessingInstructionProcessor processor) {
    if (processor == null) {
        return null; // depends on control dependency: [if], data = [none]
    }
    if (processor instanceof AbstractProcessorWrapper) {
        return (IProcessingInstructionProcessor)((AbstractProcessorWrapper) processor).unwrap(); // depends on control dependency: [if], data = [none]
    }
    return processor;
} } |
public class class_name {
/**
 * Resolves an authentication id to its AuthenticationInfo without consulting
 * the cache. (Parameter name typo preserved for interface compatibility.)
 *
 * @param authenicationId the authentication id to resolve.
 * @return the authentication info, or null when the id maps to no API key.
 */
private AuthenticationInfo getUncachedAuthenticationInfoForKey(String authenicationId) {
    final ApiKey apiKey = _authIdentityReader.getIdentityByAuthenticationId(authenicationId);
    return (apiKey == null) ? null : createAuthenticationInfo(authenicationId, apiKey);
} } | public class class_name {
/**
 * Resolves an authentication id to its AuthenticationInfo without consulting
 * the cache; returns null when the id maps to no API key.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
private AuthenticationInfo getUncachedAuthenticationInfoForKey(String authenicationId) {
    ApiKey apiKey = _authIdentityReader.getIdentityByAuthenticationId(authenicationId);
    if (apiKey == null) {
        return null; // depends on control dependency: [if], data = [none]
    }
    return createAuthenticationInfo(authenicationId, apiKey);
} } |
public class class_name {
/**
 * Sleeps for the given duration. When interrupted, returns early and restores
 * the thread's interrupt status instead of propagating
 * {@link InterruptedException}, so callers that care can still observe it.
 *
 * BUG FIX: dropped the {@code @WillCloseWhenClosed} annotation — that JSR-305
 * annotation documents resource-ownership transfer on Closeable parameters and
 * was meaningless (and misleading) on a static sleep utility.
 *
 * @param timeout how long to sleep, in {@code unit} units.
 * @param unit    the time unit of {@code timeout}.
 */
public static void sleepFor(long timeout, TimeUnit unit) {
    try {
        unit.sleep(timeout);
    } catch (InterruptedException e) {
        // Preserve the interrupt for callers that do check it.
        Thread.currentThread().interrupt();
    }
} } | public class class_name {
/**
 * Sleeps for the given duration; when interrupted, returns early and restores
 * the thread's interrupt status instead of propagating InterruptedException.
 * NOTE(review): @WillCloseWhenClosed is a JSR-305 resource-ownership annotation
 * and looks misplaced on a static sleep utility — confirm and remove.
 *
 * Trailing "depends on control dependency" comments are generated dataset
 * annotations and are preserved verbatim.
 */
@WillCloseWhenClosed
public static void sleepFor(long timeout, TimeUnit unit) {
    try {
        unit.sleep(timeout); // depends on control dependency: [try], data = [none]
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    } // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
/**
 * Determines, per source file (or for the single stream source), the source
 * compression type and the resulting destination file name / compression, and
 * records them on the corresponding FileMetadata entries in fileMetadataMap.
 * Detection is automatic (content probe, magic bytes, then file extension)
 * unless the user named an explicit compression or "none". Per-file detection
 * errors are recorded on the file's metadata rather than aborting the batch.
 *
 * @throws SnowflakeSQLException when the user-specified compression type is
 *         unknown or unsupported (thrown before any per-file processing).
 */
private void processFileCompressionTypes() throws SnowflakeSQLException
{
  // see what user has told us about the source file compression types
  boolean autoDetect = true;
  FileCompressionType userSpecifiedSourceCompression = null;
  if (SOURCE_COMPRESSION_AUTO_DETECT.equalsIgnoreCase(sourceCompression))
  {
    autoDetect = true;
  }
  else if (SOURCE_COMPRESSION_NONE.equalsIgnoreCase(sourceCompression))
  {
    autoDetect = false;
  }
  else
  {
    // explicit compression name: validate it up front
    userSpecifiedSourceCompression =
        FileCompressionType.lookupByMimeSubType(sourceCompression.toLowerCase());
    if (userSpecifiedSourceCompression == null)
    {
      throw new SnowflakeSQLException(SqlState.FEATURE_NOT_SUPPORTED,
          ErrorCode.COMPRESSION_TYPE_NOT_KNOWN.getMessageCode(),
          sourceCompression);
    }
    else if (!userSpecifiedSourceCompression.isSupported())
    {
      throw new SnowflakeSQLException(SqlState.FEATURE_NOT_SUPPORTED,
          ErrorCode.COMPRESSION_TYPE_NOT_SUPPORTED.getMessageCode(),
          sourceCompression);
    }
    autoDetect = false;
  }
  if (!sourceFromStream)
  {
    for (String srcFile : sourceFiles)
    {
      FileMetadata fileMetadata = fileMetadataMap.get(srcFile);
      if (fileMetadata.resultStatus == ResultStatus.NONEXIST ||
          fileMetadata.resultStatus == ResultStatus.DIRECTORY)
      {
        continue;
      }
      File file = new File(srcFile);
      String srcFileName = file.getName();
      String mimeTypeStr = null;
      FileCompressionType currentFileCompressionType = null;
      try
      {
        if (autoDetect)
        {
          // probe the file for compression type using tika file type detector
          mimeTypeStr = Files.probeContentType(file.toPath());
          if (mimeTypeStr == null)
          {
            // content probe was inconclusive — check magic bytes for
            // Parquet ("PAR1") and ORC ("ORC") headers
            try (FileInputStream f = new FileInputStream(file))
            {
              byte[] magic = new byte[4];
              if (f.read(magic, 0, 4) == 4)
              {
                if (Arrays.equals(magic, new byte[]{'P', 'A', 'R', '1'}))
                {
                  mimeTypeStr = "snowflake/parquet";
                }
                else if (Arrays.equals(
                    Arrays.copyOfRange(magic, 0, 3), new byte[]{'O', 'R', 'C'}))
                {
                  mimeTypeStr = "snowflake/orc";
                }
              }
            }
          }
          if (mimeTypeStr != null)
          {
            logger.debug("Mime type for {} is: {}", srcFile, mimeTypeStr);
            currentFileCompressionType = mimeTypeToCompressionType(mimeTypeStr);
          }
          // fallback: use file extension
          if (currentFileCompressionType == null)
          {
            mimeTypeStr = getMimeTypeFromFileExtension(srcFile);
            if (mimeTypeStr != null)
            {
              logger.debug("Mime type for {} is: {}", srcFile, mimeTypeStr);
              currentFileCompressionType = mimeTypeToCompressionType(mimeTypeStr);
            }
          }
        }
        else
        {
          currentFileCompressionType = userSpecifiedSourceCompression;
        }
        // check if the compression type is supported by us
        if (currentFileCompressionType != null)
        {
          fileMetadata.srcCompressionType = currentFileCompressionType;
          if (currentFileCompressionType.isSupported())
          {
            // remember the compression type if supported
            fileMetadata.destCompressionType = currentFileCompressionType;
            fileMetadata.requireCompress = false;
            fileMetadata.destFileName = srcFileName;
            logger.debug("File compression detected as {} for: {}",
                currentFileCompressionType.name(), srcFile);
          }
          else
          {
            // error if not supported
            throw new SnowflakeSQLException(SqlState.FEATURE_NOT_SUPPORTED,
                ErrorCode.COMPRESSION_TYPE_NOT_SUPPORTED.getMessageCode(),
                currentFileCompressionType.name());
          }
        }
        else
        {
          // we want to auto compress the files unless the user has disabled it
          logger.debug("Compression not found for file: {}", srcFile);
          // Set compress flag
          fileMetadata.requireCompress = autoCompress;
          fileMetadata.srcCompressionType = null;
          if (autoCompress)
          {
            // We only support gzip auto compression
            fileMetadata.destFileName = srcFileName +
                FileCompressionType.GZIP.fileExtension;
            fileMetadata.destCompressionType = FileCompressionType.GZIP;
          }
          else
          {
            fileMetadata.destFileName = srcFileName;
            fileMetadata.destCompressionType = null;
          }
        }
      }
      catch (MimeTypeParseException ex)
      {
        logger.error(
            "Exception encountered when processing file compression types",
            ex);
        fileMetadata.resultStatus = ResultStatus.ERROR;
        fileMetadata.errorDetails = "Failed to parse mime type: " +
            mimeTypeStr;
      }
      catch (Exception ex)
      {
        // SNOW-13146: don't log severe message for user error
        if (ex instanceof SnowflakeSQLException)
        {
          logger.debug(
              "Exception encountered when processing file compression types",
              ex);
        }
        else
        {
          // BUG FIX: non-user errors were logged at debug as well, making the
          // SNOW-13146 distinction above a no-op; log them at error level.
          logger.error(
              "Exception encountered when processing file compression types",
              ex);
        }
        fileMetadata.resultStatus = ResultStatus.ERROR;
        fileMetadata.errorDetails = ex.getMessage();
      }
    }
  }
  else
  {
    // source from stream case
    FileMetadata fileMetadata = fileMetadataMap.get(SRC_FILE_NAME_FOR_STREAM);
    fileMetadata.srcCompressionType = userSpecifiedSourceCompression;
    if (compressSourceFromStream)
    {
      fileMetadata.destCompressionType = FileCompressionType.GZIP;
      fileMetadata.requireCompress = true;
    }
    else
    {
      fileMetadata.destCompressionType = userSpecifiedSourceCompression;
      fileMetadata.requireCompress = false;
    }
    // add gz extension if file name doesn't have it
    if (compressSourceFromStream &&
        !destFileNameForStreamSource.endsWith(
            FileCompressionType.GZIP.fileExtension))
    {
      fileMetadata.destFileName = destFileNameForStreamSource +
          FileCompressionType.GZIP.fileExtension;
    }
    else
    {
      fileMetadata.destFileName = destFileNameForStreamSource;
    }
  }
} } | public class class_name {
private void processFileCompressionTypes() throws SnowflakeSQLException
{
// see what user has told us about the source file compression types
boolean autoDetect = true;
FileCompressionType userSpecifiedSourceCompression = null;
if (SOURCE_COMPRESSION_AUTO_DETECT.equalsIgnoreCase(sourceCompression))
{
autoDetect = true;
}
else if (SOURCE_COMPRESSION_NONE.equalsIgnoreCase(sourceCompression))
{
autoDetect = false;
}
else
{
userSpecifiedSourceCompression =
FileCompressionType.lookupByMimeSubType(sourceCompression.toLowerCase());
if (userSpecifiedSourceCompression == null)
{
throw new SnowflakeSQLException(SqlState.FEATURE_NOT_SUPPORTED,
ErrorCode.COMPRESSION_TYPE_NOT_KNOWN.getMessageCode(),
sourceCompression);
}
else if (!userSpecifiedSourceCompression.isSupported())
{
throw new SnowflakeSQLException(SqlState.FEATURE_NOT_SUPPORTED,
ErrorCode.COMPRESSION_TYPE_NOT_SUPPORTED.getMessageCode(),
sourceCompression);
}
autoDetect = false;
}
if (!sourceFromStream)
{
for (String srcFile : sourceFiles)
{
FileMetadata fileMetadata = fileMetadataMap.get(srcFile);
if (fileMetadata.resultStatus == ResultStatus.NONEXIST ||
fileMetadata.resultStatus == ResultStatus.DIRECTORY)
{
continue;
}
File file = new File(srcFile);
String srcFileName = file.getName();
String mimeTypeStr = null;
FileCompressionType currentFileCompressionType = null;
try
{
if (autoDetect)
{
// probe the file for compression type using tika file type detector
mimeTypeStr = Files.probeContentType(file.toPath()); // depends on control dependency: [if], data = [none]
if (mimeTypeStr == null)
{
try (FileInputStream f = new FileInputStream(file))
{
byte[] magic = new byte[4];
if (f.read(magic, 0, 4) == 4)
{
if (Arrays.equals(magic, new byte[]{'P', 'A', 'R', '1'}))
{
mimeTypeStr = "snowflake/parquet"; // depends on control dependency: [if], data = [none]
}
else if (Arrays.equals(
Arrays.copyOfRange(magic, 0, 3), new byte[]{'O', 'R', 'C'}))
{
mimeTypeStr = "snowflake/orc"; // depends on control dependency: [if], data = [none]
}
}
}
}
if (mimeTypeStr != null)
{
logger.debug("Mime type for {} is: {}", srcFile, mimeTypeStr); // depends on control dependency: [if], data = [none]
currentFileCompressionType = mimeTypeToCompressionType(mimeTypeStr); // depends on control dependency: [if], data = [(mimeTypeStr]
}
// fallback: use file extension
if (currentFileCompressionType == null)
{
mimeTypeStr = getMimeTypeFromFileExtension(srcFile); // depends on control dependency: [if], data = [none]
if (mimeTypeStr != null)
{
logger.debug("Mime type for {} is: {}", srcFile, mimeTypeStr); // depends on control dependency: [if], data = [none]
currentFileCompressionType = mimeTypeToCompressionType(mimeTypeStr); // depends on control dependency: [if], data = [(mimeTypeStr]
}
}
}
else
{
currentFileCompressionType = userSpecifiedSourceCompression;
}
// check if the compression type is supported by us
if (currentFileCompressionType != null)
{
fileMetadata.srcCompressionType = currentFileCompressionType; // depends on control dependency: [if], data = [none]
if (currentFileCompressionType.isSupported())
{
// remember the compression type if supported
fileMetadata.destCompressionType = currentFileCompressionType; // depends on control dependency: [if], data = [none]
fileMetadata.requireCompress = false; // depends on control dependency: [if], data = [none]
fileMetadata.destFileName = srcFileName; // depends on control dependency: [if], data = [none]
logger.debug("File compression detected as {} for: {}",
currentFileCompressionType.name(), srcFile); // depends on control dependency: [if], data = [none]
}
else
{
// error if not supported
throw new SnowflakeSQLException(SqlState.FEATURE_NOT_SUPPORTED,
ErrorCode.COMPRESSION_TYPE_NOT_SUPPORTED.getMessageCode(),
currentFileCompressionType.name());
}
}
else
{
// we want to auto compress the files unless the user has disabled it
logger.debug("Compression not found for file: {}", srcFile); // depends on control dependency: [if], data = [none]
// Set compress flag
fileMetadata.requireCompress = autoCompress; // depends on control dependency: [if], data = [none]
fileMetadata.srcCompressionType = null; // depends on control dependency: [if], data = [none]
if (autoCompress)
{
// We only support gzip auto compression
fileMetadata.destFileName = srcFileName +
FileCompressionType.GZIP.fileExtension; // depends on control dependency: [if], data = [none]
fileMetadata.destCompressionType = FileCompressionType.GZIP; // depends on control dependency: [if], data = [none]
}
else
{
fileMetadata.destFileName = srcFileName; // depends on control dependency: [if], data = [none]
fileMetadata.destCompressionType = null; // depends on control dependency: [if], data = [none]
}
}
}
catch (MimeTypeParseException ex)
{
logger.error(
"Exception encountered when processing file compression types",
ex);
fileMetadata.resultStatus = ResultStatus.ERROR;
fileMetadata.errorDetails = "Failed to parse mime type: " +
mimeTypeStr;
}
catch (Exception ex)
{
// SNOW-13146: don't log severe message for user error
if (ex instanceof SnowflakeSQLException)
{
logger.debug(
"Exception encountered when processing file compression types",
ex); // depends on control dependency: [if], data = [none]
}
else
{
logger.debug(
"Exception encountered when processing file compression types",
ex); // depends on control dependency: [if], data = [none]
}
fileMetadata.resultStatus = ResultStatus.ERROR;
fileMetadata.errorDetails = ex.getMessage();
}
}
}
else
{
// source from stream case
FileMetadata fileMetadata = fileMetadataMap.get(SRC_FILE_NAME_FOR_STREAM);
fileMetadata.srcCompressionType = userSpecifiedSourceCompression;
if (compressSourceFromStream)
{
fileMetadata.destCompressionType = FileCompressionType.GZIP;
fileMetadata.requireCompress = true;
}
else
{
fileMetadata.destCompressionType = userSpecifiedSourceCompression;
fileMetadata.requireCompress = false;
}
// add gz extension if file name doesn't have it
if (compressSourceFromStream &&
!destFileNameForStreamSource.endsWith(
FileCompressionType.GZIP.fileExtension))
{
fileMetadata.destFileName = destFileNameForStreamSource +
FileCompressionType.GZIP.fileExtension;
}
else
{
fileMetadata.destFileName = destFileNameForStreamSource;
}
}
} } |
public class class_name {
private AttributeDefinition renameAttribute(String currDefId, ExtendedAttributeDefinition oldAttribute, ExtendedAttributeDefinition newAttribute, boolean required) {
AttributeDefinitionSpecification result = new AttributeDefinitionSpecification();
// Set some original values
result.setId(newAttribute.getID());
result.setType(oldAttribute.getType());
result.setRequired(required);
if (result.getType() != newAttribute.getType()) {
// If the type of the new Attribute doesn't match the type of the original we need to issue a warning.
error("schemagen.invalid.type.override", new Object[] { newAttribute.getID(), currDefId, result.getType() });
}
result.setName((newAttribute.getName() != null) ? newAttribute.getName() : oldAttribute.getName());
result.setDescription((newAttribute.getDescription() != null) ? newAttribute.getDescription() : oldAttribute.getDescription());
result.setDefaultValue((newAttribute.getDefaultValue() != null) ? newAttribute.getDefaultValue() : oldAttribute.getDefaultValue());
result.setCardinality((newAttribute.getCardinality() != 0) ? newAttribute.getCardinality() : oldAttribute.getCardinality());
// We need to build up the valueOptions. We have to do this by getting the labels and options, because there are no methods that
// return the valueOptions in a list.
String[] optionLabels = newAttribute.getOptionLabels();
String[] optionValues = newAttribute.getOptionValues();
if (optionLabels == null || optionLabels.length == 0)
optionLabels = oldAttribute.getOptionLabels();
if (optionValues == null || optionValues.length == 0)
optionValues = oldAttribute.getOptionValues();
// We need to carefully build up the arrays that are stored in the list.
List<String[]> valueOptions = new ArrayList<String[]>();
if (optionLabels != null) {
for (int i = 0; i < optionLabels.length; i++) {
String[] newArray = new String[] { optionValues[i], optionLabels[i] };
valueOptions.add(newArray);
}
}
result.setValueOptions(valueOptions);
// We need to build up the extensions into an ExtendableHelper. Again we can get the individual extensions but have to build
// the map of maps ourselves. We also make sure any overridden extension uris are also replaced in the final map.
// _Linked_HashMaps are used to preserve insertion order, for tools
Map<String, Map<String, String>> extensions = new LinkedHashMap<String, Map<String, String>>();
// Get a list of all the Extension URIs in both the old and new Attribute.
Set<String> extensionURIs = new LinkedHashSet<String>();
Set<String> newExtensionURIs = newAttribute.getExtensionUris();
extensionURIs.addAll(oldAttribute.getExtensionUris());
extensionURIs.addAll(newExtensionURIs);
// Now go and get the extensions for each of the URIs.
for (String currUri : extensionURIs) {
Map<String, String> currExtensions = new LinkedHashMap<String, String>();
if (newExtensionURIs.contains(currUri))
currExtensions.putAll(newAttribute.getExtensions(currUri));
else
currExtensions.putAll(oldAttribute.getExtensions(currUri));
// If we're dealing with the metatype extension uri, remove the rename extension.
if (SchemaGenConstants.METATYPE_EXTENSION_URI.equals(currUri)) {
currExtensions.remove("rename");
}
extensions.put(currUri, Collections.unmodifiableMap(currExtensions));
}
ExtendableHelper helper = new ExtendableHelper(extensions);
result.setExtendedAttributes(helper);
return result;
} } | public class class_name {
private AttributeDefinition renameAttribute(String currDefId, ExtendedAttributeDefinition oldAttribute, ExtendedAttributeDefinition newAttribute, boolean required) {
AttributeDefinitionSpecification result = new AttributeDefinitionSpecification();
// Set some original values
result.setId(newAttribute.getID());
result.setType(oldAttribute.getType());
result.setRequired(required);
if (result.getType() != newAttribute.getType()) {
// If the type of the new Attribute doesn't match the type of the original we need to issue a warning.
error("schemagen.invalid.type.override", new Object[] { newAttribute.getID(), currDefId, result.getType() }); // depends on control dependency: [if], data = [none]
}
result.setName((newAttribute.getName() != null) ? newAttribute.getName() : oldAttribute.getName());
result.setDescription((newAttribute.getDescription() != null) ? newAttribute.getDescription() : oldAttribute.getDescription());
result.setDefaultValue((newAttribute.getDefaultValue() != null) ? newAttribute.getDefaultValue() : oldAttribute.getDefaultValue());
result.setCardinality((newAttribute.getCardinality() != 0) ? newAttribute.getCardinality() : oldAttribute.getCardinality());
// We need to build up the valueOptions. We have to do this by getting the labels and options, because there are no methods that
// return the valueOptions in a list.
String[] optionLabels = newAttribute.getOptionLabels();
String[] optionValues = newAttribute.getOptionValues();
if (optionLabels == null || optionLabels.length == 0)
optionLabels = oldAttribute.getOptionLabels();
if (optionValues == null || optionValues.length == 0)
optionValues = oldAttribute.getOptionValues();
// We need to carefully build up the arrays that are stored in the list.
List<String[]> valueOptions = new ArrayList<String[]>();
if (optionLabels != null) {
for (int i = 0; i < optionLabels.length; i++) {
String[] newArray = new String[] { optionValues[i], optionLabels[i] };
valueOptions.add(newArray); // depends on control dependency: [for], data = [none]
}
}
result.setValueOptions(valueOptions);
// We need to build up the extensions into an ExtendableHelper. Again we can get the individual extensions but have to build
// the map of maps ourselves. We also make sure any overridden extension uris are also replaced in the final map.
// _Linked_HashMaps are used to preserve insertion order, for tools
Map<String, Map<String, String>> extensions = new LinkedHashMap<String, Map<String, String>>();
// Get a list of all the Extension URIs in both the old and new Attribute.
Set<String> extensionURIs = new LinkedHashSet<String>();
Set<String> newExtensionURIs = newAttribute.getExtensionUris();
extensionURIs.addAll(oldAttribute.getExtensionUris());
extensionURIs.addAll(newExtensionURIs);
// Now go and get the extensions for each of the URIs.
for (String currUri : extensionURIs) {
Map<String, String> currExtensions = new LinkedHashMap<String, String>();
if (newExtensionURIs.contains(currUri))
currExtensions.putAll(newAttribute.getExtensions(currUri));
else
currExtensions.putAll(oldAttribute.getExtensions(currUri));
// If we're dealing with the metatype extension uri, remove the rename extension.
if (SchemaGenConstants.METATYPE_EXTENSION_URI.equals(currUri)) {
currExtensions.remove("rename"); // depends on control dependency: [if], data = [none]
}
extensions.put(currUri, Collections.unmodifiableMap(currExtensions)); // depends on control dependency: [for], data = [currUri]
}
ExtendableHelper helper = new ExtendableHelper(extensions);
result.setExtendedAttributes(helper);
return result;
} } |
public class class_name {
public String[] lookupAllPrefixes(String uri)
{
java.util.ArrayList foundPrefixes = new java.util.ArrayList();
Enumeration prefixes = m_namespaces.keys();
while (prefixes.hasMoreElements())
{
String prefix = (String) prefixes.nextElement();
String uri2 = lookupNamespace(prefix);
if (uri2 != null && uri2.equals(uri))
{
foundPrefixes.add(prefix);
}
}
String[] prefixArray = new String[foundPrefixes.size()];
foundPrefixes.toArray(prefixArray);
return prefixArray;
} } | public class class_name {
public String[] lookupAllPrefixes(String uri)
{
java.util.ArrayList foundPrefixes = new java.util.ArrayList();
Enumeration prefixes = m_namespaces.keys();
while (prefixes.hasMoreElements())
{
String prefix = (String) prefixes.nextElement();
String uri2 = lookupNamespace(prefix);
if (uri2 != null && uri2.equals(uri))
{
foundPrefixes.add(prefix); // depends on control dependency: [if], data = [none]
}
}
String[] prefixArray = new String[foundPrefixes.size()];
foundPrefixes.toArray(prefixArray);
return prefixArray;
} } |
public class class_name {
public String pageBody(int segment, String className, String parameters) {
if (segment == HTML_START) {
StringBuffer result = new StringBuffer(128);
result.append("</head>\n<body unselectable=\"on\"");
if (className != null) {
result.append(" class=\"");
result.append(className);
result.append("\"");
}
if (CmsStringUtil.isNotEmpty(parameters)) {
result.append(" ");
result.append(parameters);
}
result.append(">\n");
return result.toString();
} else {
return "</body>";
}
} } | public class class_name {
public String pageBody(int segment, String className, String parameters) {
if (segment == HTML_START) {
StringBuffer result = new StringBuffer(128);
result.append("</head>\n<body unselectable=\"on\""); // depends on control dependency: [if], data = [none]
if (className != null) {
result.append(" class=\""); // depends on control dependency: [if], data = [none]
result.append(className); // depends on control dependency: [if], data = [(className]
result.append("\""); // depends on control dependency: [if], data = [none]
}
if (CmsStringUtil.isNotEmpty(parameters)) {
result.append(" "); // depends on control dependency: [if], data = [none]
result.append(parameters); // depends on control dependency: [if], data = [none]
}
result.append(">\n"); // depends on control dependency: [if], data = [none]
return result.toString(); // depends on control dependency: [if], data = [none]
} else {
return "</body>"; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public synchronized String getSubDomain()
{
if(m_imageUrlSubdomains==null || m_imageUrlSubdomains.length<=0) {
return null;
}
final String result = m_imageUrlSubdomains[m_subdomainsCounter];
if(m_subdomainsCounter<m_imageUrlSubdomains.length-1) {
m_subdomainsCounter++;
} else {
m_subdomainsCounter=0;
}
return result;
} } | public class class_name {
public synchronized String getSubDomain()
{
if(m_imageUrlSubdomains==null || m_imageUrlSubdomains.length<=0) {
return null; // depends on control dependency: [if], data = [none]
}
final String result = m_imageUrlSubdomains[m_subdomainsCounter];
if(m_subdomainsCounter<m_imageUrlSubdomains.length-1) {
m_subdomainsCounter++; // depends on control dependency: [if], data = [none]
} else {
m_subdomainsCounter=0; // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
private void createNodes(DefaultMutableTreeNode top) {
// leaves
DefaultMutableTreeNode defaultMutableTreeNode = null;
// Train Files
DefaultMutableTreeNode trainCategory = new DefaultMutableTreeNode("Train");
top.add(trainCategory);
// Test Files
DefaultMutableTreeNode testCategory = new DefaultMutableTreeNode("Test");
top.add(testCategory);
// long count = UCSDAnomalyDAO.selectCount();
int trainTestSplit = UCSDAnomalyDAO.getTrainTestSplit();
int index = 1;
do {
DefaultMutableTreeNode category = new DefaultMutableTreeNode(index);
if (index <= trainTestSplit) {
trainCategory.add(category);
} else {
testCategory.add(category);
}
List<UCSDAnomaly> fames = UCSDAnomalyDAO.selectClip(index);
for (UCSDAnomaly ucsdAnomaly : fames) {
// ucsdAnomaly.getTifid();
defaultMutableTreeNode = new DefaultMutableTreeNode(ucsdAnomaly);
category.add(defaultMutableTreeNode);
}
} while (UCSDAnomalyDAO.selectClip(++index).size() > 0);
} } | public class class_name {
private void createNodes(DefaultMutableTreeNode top) {
// leaves
DefaultMutableTreeNode defaultMutableTreeNode = null;
// Train Files
DefaultMutableTreeNode trainCategory = new DefaultMutableTreeNode("Train");
top.add(trainCategory);
// Test Files
DefaultMutableTreeNode testCategory = new DefaultMutableTreeNode("Test");
top.add(testCategory);
// long count = UCSDAnomalyDAO.selectCount();
int trainTestSplit = UCSDAnomalyDAO.getTrainTestSplit();
int index = 1;
do {
DefaultMutableTreeNode category = new DefaultMutableTreeNode(index);
if (index <= trainTestSplit) {
trainCategory.add(category); // depends on control dependency: [if], data = [none]
} else {
testCategory.add(category); // depends on control dependency: [if], data = [none]
}
List<UCSDAnomaly> fames = UCSDAnomalyDAO.selectClip(index);
for (UCSDAnomaly ucsdAnomaly : fames) {
// ucsdAnomaly.getTifid();
defaultMutableTreeNode = new DefaultMutableTreeNode(ucsdAnomaly); // depends on control dependency: [for], data = [ucsdAnomaly]
category.add(defaultMutableTreeNode); // depends on control dependency: [for], data = [none]
}
} while (UCSDAnomalyDAO.selectClip(++index).size() > 0);
} } |
public class class_name {
public static void printMatrixData(double[][] matrix) {
int cols = matrix[0].length;
int rows = matrix.length;
for (int r = 0; r < rows; r++) {
for (int c = 0; c < cols; c++) {
printer.print(matrix[r][c]);
printer.print(separator);
}
printer.println();
}
} } | public class class_name {
public static void printMatrixData(double[][] matrix) {
int cols = matrix[0].length;
int rows = matrix.length;
for (int r = 0; r < rows; r++) {
for (int c = 0; c < cols; c++) {
printer.print(matrix[r][c]); // depends on control dependency: [for], data = [c]
printer.print(separator); // depends on control dependency: [for], data = [none]
}
printer.println(); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
@Trivial
private void stateHalfOpen() {
halfOpenRunningExecutions = 0;
halfOpenSuccessfulExecutions = 0;
state.set(State.HALF_OPEN);
metricRecorder.reportCircuitHalfOpen();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Transitioned to Half Open state");
}
} } | public class class_name {
@Trivial
private void stateHalfOpen() {
halfOpenRunningExecutions = 0;
halfOpenSuccessfulExecutions = 0;
state.set(State.HALF_OPEN);
metricRecorder.reportCircuitHalfOpen();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "Transitioned to Half Open state"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public final EObject ruleRichStringPart() throws RecognitionException {
EObject current = null;
EObject this_XExpressionOrVarDeclaration_0 = null;
EObject this_RichStringForLoop_1 = null;
EObject this_RichStringIf_2 = null;
enterRule();
try {
// InternalSARL.g:11132:2: ( (this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration | this_RichStringForLoop_1= ruleRichStringForLoop | this_RichStringIf_2= ruleRichStringIf ) )
// InternalSARL.g:11133:2: (this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration | this_RichStringForLoop_1= ruleRichStringForLoop | this_RichStringIf_2= ruleRichStringIf )
{
// InternalSARL.g:11133:2: (this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration | this_RichStringForLoop_1= ruleRichStringForLoop | this_RichStringIf_2= ruleRichStringIf )
int alt280=3;
switch ( input.LA(1) ) {
case RULE_STRING:
case RULE_ID:
case RULE_RICH_TEXT:
case RULE_RICH_TEXT_START:
case RULE_HEX:
case RULE_INT:
case RULE_DECIMAL:
case 25:
case 28:
case 29:
case 36:
case 39:
case 40:
case 42:
case 43:
case 44:
case 45:
case 48:
case 49:
case 51:
case 55:
case 60:
case 61:
case 62:
case 63:
case 65:
case 66:
case 67:
case 68:
case 73:
case 74:
case 75:
case 78:
case 79:
case 80:
case 81:
case 82:
case 83:
case 84:
case 85:
case 86:
case 87:
case 88:
case 89:
case 90:
case 91:
case 92:
case 93:
case 94:
case 95:
case 96:
case 106:
case 129:
case 131:
case 132:
case 133:
case 134:
case 135:
case 136:
case 137:
case 138:
case 139:
case 140:
{
alt280=1;
}
break;
case 99:
{
alt280=2;
}
break;
case 101:
{
alt280=3;
}
break;
default:
if (state.backtracking>0) {state.failed=true; return current;}
NoViableAltException nvae =
new NoViableAltException("", 280, 0, input);
throw nvae;
}
switch (alt280) {
case 1 :
// InternalSARL.g:11134:3: this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getRichStringPartAccess().getXExpressionOrVarDeclarationParserRuleCall_0());
}
pushFollow(FOLLOW_2);
this_XExpressionOrVarDeclaration_0=ruleXExpressionOrVarDeclaration();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XExpressionOrVarDeclaration_0;
afterParserOrEnumRuleCall();
}
}
break;
case 2 :
// InternalSARL.g:11143:3: this_RichStringForLoop_1= ruleRichStringForLoop
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getRichStringPartAccess().getRichStringForLoopParserRuleCall_1());
}
pushFollow(FOLLOW_2);
this_RichStringForLoop_1=ruleRichStringForLoop();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_RichStringForLoop_1;
afterParserOrEnumRuleCall();
}
}
break;
case 3 :
// InternalSARL.g:11152:3: this_RichStringIf_2= ruleRichStringIf
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getRichStringPartAccess().getRichStringIfParserRuleCall_2());
}
pushFollow(FOLLOW_2);
this_RichStringIf_2=ruleRichStringIf();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_RichStringIf_2;
afterParserOrEnumRuleCall();
}
}
break;
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} } | public class class_name {
public final EObject ruleRichStringPart() throws RecognitionException {
EObject current = null;
EObject this_XExpressionOrVarDeclaration_0 = null;
EObject this_RichStringForLoop_1 = null;
EObject this_RichStringIf_2 = null;
enterRule();
try {
// InternalSARL.g:11132:2: ( (this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration | this_RichStringForLoop_1= ruleRichStringForLoop | this_RichStringIf_2= ruleRichStringIf ) )
// InternalSARL.g:11133:2: (this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration | this_RichStringForLoop_1= ruleRichStringForLoop | this_RichStringIf_2= ruleRichStringIf )
{
// InternalSARL.g:11133:2: (this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration | this_RichStringForLoop_1= ruleRichStringForLoop | this_RichStringIf_2= ruleRichStringIf )
int alt280=3;
switch ( input.LA(1) ) {
case RULE_STRING:
case RULE_ID:
case RULE_RICH_TEXT:
case RULE_RICH_TEXT_START:
case RULE_HEX:
case RULE_INT:
case RULE_DECIMAL:
case 25:
case 28:
case 29:
case 36:
case 39:
case 40:
case 42:
case 43:
case 44:
case 45:
case 48:
case 49:
case 51:
case 55:
case 60:
case 61:
case 62:
case 63:
case 65:
case 66:
case 67:
case 68:
case 73:
case 74:
case 75:
case 78:
case 79:
case 80:
case 81:
case 82:
case 83:
case 84:
case 85:
case 86:
case 87:
case 88:
case 89:
case 90:
case 91:
case 92:
case 93:
case 94:
case 95:
case 96:
case 106:
case 129:
case 131:
case 132:
case 133:
case 134:
case 135:
case 136:
case 137:
case 138:
case 139:
case 140:
{
alt280=1;
}
break;
case 99:
{
alt280=2;
}
break;
case 101:
{
alt280=3;
}
break;
default:
if (state.backtracking>0) {state.failed=true; return current;} // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
NoViableAltException nvae =
new NoViableAltException("", 280, 0, input);
throw nvae;
}
switch (alt280) {
case 1 :
// InternalSARL.g:11134:3: this_XExpressionOrVarDeclaration_0= ruleXExpressionOrVarDeclaration
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getRichStringPartAccess().getXExpressionOrVarDeclarationParserRuleCall_0()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_2);
this_XExpressionOrVarDeclaration_0=ruleXExpressionOrVarDeclaration();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_XExpressionOrVarDeclaration_0; // depends on control dependency: [if], data = [none]
afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none]
}
}
break;
case 2 :
// InternalSARL.g:11143:3: this_RichStringForLoop_1= ruleRichStringForLoop
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getRichStringPartAccess().getRichStringForLoopParserRuleCall_1()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_2);
this_RichStringForLoop_1=ruleRichStringForLoop();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_RichStringForLoop_1; // depends on control dependency: [if], data = [none]
afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none]
}
}
break;
case 3 :
// InternalSARL.g:11152:3: this_RichStringIf_2= ruleRichStringIf
{
if ( state.backtracking==0 ) {
newCompositeNode(grammarAccess.getRichStringPartAccess().getRichStringIfParserRuleCall_2()); // depends on control dependency: [if], data = [none]
}
pushFollow(FOLLOW_2);
this_RichStringIf_2=ruleRichStringIf();
state._fsp--;
if (state.failed) return current;
if ( state.backtracking==0 ) {
current = this_RichStringIf_2; // depends on control dependency: [if], data = [none]
afterParserOrEnumRuleCall(); // depends on control dependency: [if], data = [none]
}
}
break;
}
}
if ( state.backtracking==0 ) {
leaveRule();
}
}
catch (RecognitionException re) {
recover(input,re);
appendSkippedTokens();
}
finally {
}
return current;
} } |
public class class_name {
@Override
public void removeByCommerceNotificationQueueEntryId(
long commerceNotificationQueueEntryId) {
for (CommerceNotificationAttachment commerceNotificationAttachment : findByCommerceNotificationQueueEntryId(
commerceNotificationQueueEntryId, QueryUtil.ALL_POS,
QueryUtil.ALL_POS, null)) {
remove(commerceNotificationAttachment);
}
} } | public class class_name {
@Override
public void removeByCommerceNotificationQueueEntryId(
long commerceNotificationQueueEntryId) {
for (CommerceNotificationAttachment commerceNotificationAttachment : findByCommerceNotificationQueueEntryId(
commerceNotificationQueueEntryId, QueryUtil.ALL_POS,
QueryUtil.ALL_POS, null)) {
remove(commerceNotificationAttachment); // depends on control dependency: [for], data = [commerceNotificationAttachment]
}
} } |
public class class_name {
public final <T2, O> Mono<O> zipWith(Mono<? extends T2> other,
BiFunction<? super T, ? super T2, ? extends O> combinator) {
if (this instanceof MonoZip) {
@SuppressWarnings("unchecked") MonoZip<T, O> o = (MonoZip<T, O>) this;
Mono<O> result = o.zipAdditionalSource(other, combinator);
if (result != null) {
return result;
}
}
return zip(this, other, combinator);
} } | public class class_name {
public final <T2, O> Mono<O> zipWith(Mono<? extends T2> other,
BiFunction<? super T, ? super T2, ? extends O> combinator) {
if (this instanceof MonoZip) {
@SuppressWarnings("unchecked") MonoZip<T, O> o = (MonoZip<T, O>) this;
Mono<O> result = o.zipAdditionalSource(other, combinator);
if (result != null) {
return result; // depends on control dependency: [if], data = [none]
}
}
return zip(this, other, combinator);
} } |
public class class_name {
private String formatValue(String[] value) {
String ret = "";
for(int i=0;i<value.length;i++) {
ret += value[i];
if(i<value.length-1) ret+="\n";
}
return ret;
} } | public class class_name {
private String formatValue(String[] value) {
String ret = "";
for(int i=0;i<value.length;i++) {
ret += value[i]; // depends on control dependency: [for], data = [i]
if(i<value.length-1) ret+="\n";
}
return ret;
} } |
public class class_name {
public void setExprList(ExprList exprList) {
int len = exprList.length();
if (len == 0) {
throw new ParseException("模板函数名不能缺失", location);
}
int index = 0;
Expr expr = exprList.getExpr(index);
if (expr instanceof Const && ((Const)expr).isBoolean()) {
if (len == 1) {
throw new ParseException("模板函数名不能缺失", location);
}
nullSafe = ((Const)expr).getBoolean();
index++;
}
funcNameExpr = exprList.getExpr(index++);
ArrayList<Expr> list = new ArrayList<Expr>();
for (int i=index; i<len; i++) {
list.add(exprList.getExpr(i));
}
paraExpr = new ExprList(list);
} } | public class class_name {
public void setExprList(ExprList exprList) {
int len = exprList.length();
if (len == 0) {
throw new ParseException("模板函数名不能缺失", location);
}
int index = 0;
Expr expr = exprList.getExpr(index);
if (expr instanceof Const && ((Const)expr).isBoolean()) {
if (len == 1) {
throw new ParseException("模板函数名不能缺失", location);
}
nullSafe = ((Const)expr).getBoolean();
// depends on control dependency: [if], data = [none]
index++;
// depends on control dependency: [if], data = [none]
}
funcNameExpr = exprList.getExpr(index++);
ArrayList<Expr> list = new ArrayList<Expr>();
for (int i=index; i<len; i++) {
list.add(exprList.getExpr(i));
// depends on control dependency: [for], data = [i]
}
paraExpr = new ExprList(list);
} } |
public class class_name {
private boolean setupEphemeralECDHKeys() {
int index = -1;
if (supportedCurves != null) {
// if the client sent the supported curves extension, pick the
// first one that we support;
for (int curveId : supportedCurves.curveIds()) {
if (SupportedEllipticCurvesExtension.isSupported(curveId)) {
index = curveId;
break;
}
}
if (index < 0) {
// no match found, cannot use this ciphersuite
return false;
}
} else {
// pick our preference
index = SupportedEllipticCurvesExtension.DEFAULT.curveIds()[0];
}
String oid = SupportedEllipticCurvesExtension.getCurveOid(index);
ecdh = new ECDHCrypt(oid, sslContext.getSecureRandom());
return true;
} } | public class class_name {
private boolean setupEphemeralECDHKeys() {
int index = -1;
if (supportedCurves != null) {
// if the client sent the supported curves extension, pick the
// first one that we support;
for (int curveId : supportedCurves.curveIds()) {
if (SupportedEllipticCurvesExtension.isSupported(curveId)) {
index = curveId; // depends on control dependency: [if], data = [none]
break;
}
}
if (index < 0) {
// no match found, cannot use this ciphersuite
return false; // depends on control dependency: [if], data = [none]
}
} else {
// pick our preference
index = SupportedEllipticCurvesExtension.DEFAULT.curveIds()[0]; // depends on control dependency: [if], data = [none]
}
String oid = SupportedEllipticCurvesExtension.getCurveOid(index);
ecdh = new ECDHCrypt(oid, sslContext.getSecureRandom());
return true;
} } |
public class class_name {
@Remote
public Challenge getChallenge(int captchaIndex) {
if (imagesRepositoryDir == null) {
log.debug("Simple CAPTCHA not properly initialized. Missing <captcha> section from application descriptor:\r\n" + //
"\t<captcha>\r\n" + //
"\t\t<property name=\"captcha.repository.path\" value=\"/path/to/captcha/images\" />\r\n" + //
"\t\t<property name=\"captcha.set.size\" value=\"5\" />\r\n" + //
"\t</captcha>");
throw new BugError("Missing CAPTCHA images repository. Most probably <captcha> section is missing from application descriptor.");
}
Challenge challenge = new Challenge(imagesRepositoryDir, challengeSetSize);
getChallenges().put(captchaIndex, challenge);
return challenge;
} } | public class class_name {
@Remote
public Challenge getChallenge(int captchaIndex) {
if (imagesRepositoryDir == null) {
log.debug("Simple CAPTCHA not properly initialized. Missing <captcha> section from application descriptor:\r\n" + //
"\t<captcha>\r\n" + //
"\t\t<property name=\"captcha.repository.path\" value=\"/path/to/captcha/images\" />\r\n" + //
"\t\t<property name=\"captcha.set.size\" value=\"5\" />\r\n" + //
"\t</captcha>");
// depends on control dependency: [if], data = [none]
throw new BugError("Missing CAPTCHA images repository. Most probably <captcha> section is missing from application descriptor.");
}
Challenge challenge = new Challenge(imagesRepositoryDir, challengeSetSize);
getChallenges().put(captchaIndex, challenge);
return challenge;
} } |
public class class_name {
public void flushAllLogs(final boolean force) {
Iterator<Log> iter = getLogIterator();
while (iter.hasNext()) {
Log log = iter.next();
try {
boolean needFlush = force;
if (!needFlush) {
long timeSinceLastFlush = System.currentTimeMillis() - log.getLastFlushedTime();
Integer logFlushInterval = logFlushIntervalMap.get(log.getTopicName());
if (logFlushInterval == null) {
logFlushInterval = config.getDefaultFlushIntervalMs();
}
final String flushLogFormat = "[%s] flush interval %d, last flushed %d, need flush? %s";
needFlush = timeSinceLastFlush >= logFlushInterval.intValue();
logger.trace(String.format(flushLogFormat, log.getTopicName(), logFlushInterval,
log.getLastFlushedTime(), needFlush));
}
if (needFlush) {
log.flush();
}
} catch (IOException ioe) {
logger.error("Error flushing topic " + log.getTopicName(), ioe);
logger.error("Halting due to unrecoverable I/O error while flushing logs: " + ioe.getMessage(), ioe);
Runtime.getRuntime().halt(1);
} catch (Exception e) {
logger.error("Error flushing topic " + log.getTopicName(), e);
}
}
} } | public class class_name {
public void flushAllLogs(final boolean force) {
Iterator<Log> iter = getLogIterator();
while (iter.hasNext()) {
Log log = iter.next();
try {
boolean needFlush = force;
if (!needFlush) {
long timeSinceLastFlush = System.currentTimeMillis() - log.getLastFlushedTime();
Integer logFlushInterval = logFlushIntervalMap.get(log.getTopicName());
if (logFlushInterval == null) {
logFlushInterval = config.getDefaultFlushIntervalMs(); // depends on control dependency: [if], data = [none]
}
final String flushLogFormat = "[%s] flush interval %d, last flushed %d, need flush? %s"; // depends on control dependency: [if], data = [none]
needFlush = timeSinceLastFlush >= logFlushInterval.intValue(); // depends on control dependency: [if], data = [none]
logger.trace(String.format(flushLogFormat, log.getTopicName(), logFlushInterval,
log.getLastFlushedTime(), needFlush)); // depends on control dependency: [if], data = [none]
}
if (needFlush) {
log.flush(); // depends on control dependency: [if], data = [none]
}
} catch (IOException ioe) {
logger.error("Error flushing topic " + log.getTopicName(), ioe);
logger.error("Halting due to unrecoverable I/O error while flushing logs: " + ioe.getMessage(), ioe);
Runtime.getRuntime().halt(1);
} catch (Exception e) { // depends on control dependency: [catch], data = [none]
logger.error("Error flushing topic " + log.getTopicName(), e);
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
private AMethodWithItsArgs findBestMethodUsingParamNames(Set<Method> methods, Set<String> paramNames, ObjectNode paramNodes) {
ParameterCount max = new ParameterCount();
for (Method method : methods) {
List<Class<?>> parameterTypes = getParameterTypes(method);
int typeNameCountDiff = parameterTypes.size() - paramNames.size();
if (!acceptParamCount(typeNameCountDiff)) {
continue;
}
ParameterCount parStat = new ParameterCount(paramNames, paramNodes, parameterTypes, method);
if (!acceptParamCount(parStat.nameCount - paramNames.size())) {
continue;
}
if (hasMoreMatches(max.nameCount, parStat.nameCount) || parStat.nameCount == max.nameCount && hasMoreMatches(max.typeCount, parStat.typeCount)) {
max = parStat;
}
}
if (max.method == null) {
return null;
}
return new AMethodWithItsArgs(max.method, paramNames, max.allNames, paramNodes);
} } | public class class_name {
private AMethodWithItsArgs findBestMethodUsingParamNames(Set<Method> methods, Set<String> paramNames, ObjectNode paramNodes) {
ParameterCount max = new ParameterCount();
for (Method method : methods) {
List<Class<?>> parameterTypes = getParameterTypes(method); // depends on control dependency: [for], data = [method]
int typeNameCountDiff = parameterTypes.size() - paramNames.size();
if (!acceptParamCount(typeNameCountDiff)) {
continue;
}
ParameterCount parStat = new ParameterCount(paramNames, paramNodes, parameterTypes, method);
if (!acceptParamCount(parStat.nameCount - paramNames.size())) {
continue;
}
if (hasMoreMatches(max.nameCount, parStat.nameCount) || parStat.nameCount == max.nameCount && hasMoreMatches(max.typeCount, parStat.typeCount)) {
max = parStat; // depends on control dependency: [if], data = [none]
}
}
if (max.method == null) {
return null; // depends on control dependency: [if], data = [none]
}
return new AMethodWithItsArgs(max.method, paramNames, max.allNames, paramNodes);
} } |
public class class_name {
public static final Parser<Double> instance() { // NOPMD it's thread save!
if (PercentDoubleParser.instanceParser == null) {
synchronized (PercentDoubleParser.class) {
if (PercentDoubleParser.instanceParser == null) {
PercentDoubleParser.instanceParser = new PercentDoubleParser();
}
}
}
return PercentDoubleParser.instanceParser;
} } | public class class_name {
public static final Parser<Double> instance() { // NOPMD it's thread save!
if (PercentDoubleParser.instanceParser == null) {
synchronized (PercentDoubleParser.class) { // depends on control dependency: [if], data = [none]
if (PercentDoubleParser.instanceParser == null) {
PercentDoubleParser.instanceParser = new PercentDoubleParser(); // depends on control dependency: [if], data = [none]
}
}
}
return PercentDoubleParser.instanceParser;
} } |
public class class_name {
public static GameMap createMapFromReplayFile(String gioReplayFileLocation) {
try {
Replay replay = OBJECT_MAPPER.readValue(new File(gioReplayFileLocation), Replay.class);
return createMapFromReplay(replay);
} catch (IOException e) {
throw new RuntimeException("Can not create game map from file: " + gioReplayFileLocation, e);
}
} } | public class class_name {
public static GameMap createMapFromReplayFile(String gioReplayFileLocation) {
try {
Replay replay = OBJECT_MAPPER.readValue(new File(gioReplayFileLocation), Replay.class);
return createMapFromReplay(replay); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
throw new RuntimeException("Can not create game map from file: " + gioReplayFileLocation, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public final void setCountryCode(final String pcountryCode, final Locale plocale) {
if (StringUtils.isEmpty(pcountryCode)) {
defaultCountryData = null;
} else {
defaultCountryData =
CreatePhoneCountryConstantsClass.create(plocale).countryMap().get(pcountryCode);
}
} } | public class class_name {
public final void setCountryCode(final String pcountryCode, final Locale plocale) {
if (StringUtils.isEmpty(pcountryCode)) {
defaultCountryData = null; // depends on control dependency: [if], data = [none]
} else {
defaultCountryData =
CreatePhoneCountryConstantsClass.create(plocale).countryMap().get(pcountryCode); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public boolean changePattern (FormationPattern<T> pattern) {
// Find out how many slots we have occupied
int occupiedSlots = slotAssignments.size;
// Check if the pattern supports one more slot
if (pattern.supportsSlots(occupiedSlots)) {
setPattern(pattern);
// Update the slot assignments and return success
updateSlotAssignments();
return true;
}
return false;
} } | public class class_name {
public boolean changePattern (FormationPattern<T> pattern) {
// Find out how many slots we have occupied
int occupiedSlots = slotAssignments.size;
// Check if the pattern supports one more slot
if (pattern.supportsSlots(occupiedSlots)) {
setPattern(pattern); // depends on control dependency: [if], data = [none]
// Update the slot assignments and return success
updateSlotAssignments(); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
private boolean isSyncCollectionCreation(int seen) {
if (seen == Const.INVOKESPECIAL) {
if (Values.CONSTRUCTOR.equals(getNameConstantOperand())) {
return (syncCollections.contains(getClassConstantOperand()));
}
} else if ((seen == Const.INVOKESTATIC) && "java/util/Collections".equals(getClassConstantOperand())) {
String methodName = getNameConstantOperand();
return ("synchronizedMap".equals(methodName) || "synchronizedSet".equals(methodName));
}
return false;
} } | public class class_name {
private boolean isSyncCollectionCreation(int seen) {
if (seen == Const.INVOKESPECIAL) {
if (Values.CONSTRUCTOR.equals(getNameConstantOperand())) {
return (syncCollections.contains(getClassConstantOperand())); // depends on control dependency: [if], data = [none]
}
} else if ((seen == Const.INVOKESTATIC) && "java/util/Collections".equals(getClassConstantOperand())) {
String methodName = getNameConstantOperand();
return ("synchronizedMap".equals(methodName) || "synchronizedSet".equals(methodName)); // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
protected void initChildNodes(final Node newNode) {
if (childNodes == null) {
childNodes = new ArrayList<>();
}
if (ownerDocument != null) {
if (newNode.ownerDocument != ownerDocument) {
changeOwnerDocument(newNode, ownerDocument);
}
}
} } | public class class_name {
protected void initChildNodes(final Node newNode) {
if (childNodes == null) {
childNodes = new ArrayList<>(); // depends on control dependency: [if], data = [none]
}
if (ownerDocument != null) {
if (newNode.ownerDocument != ownerDocument) {
changeOwnerDocument(newNode, ownerDocument); // depends on control dependency: [if], data = [ownerDocument)]
}
}
} } |
public class class_name {
public Object execute(final Object value, final CsvContext context) {
validateInputNotNull(value, context);
if( !(value instanceof String) ) {
throw new SuperCsvCellProcessorException(String.class, value, context, this);
}
final String stringValue = (String) value;
final Boolean result;
if( contains(trueValues, stringValue, ignoreCase) ) {
result = Boolean.TRUE;
} else if( contains(falseValues, stringValue, ignoreCase) ) {
result = Boolean.FALSE;
} else {
throw new SuperCsvCellProcessorException(String.format("'%s' could not be parsed as a Boolean", value),
context, this);
}
return next.execute(result, context);
} } | public class class_name {
public Object execute(final Object value, final CsvContext context) {
validateInputNotNull(value, context);
if( !(value instanceof String) ) {
throw new SuperCsvCellProcessorException(String.class, value, context, this);
}
final String stringValue = (String) value;
final Boolean result;
if( contains(trueValues, stringValue, ignoreCase) ) {
result = Boolean.TRUE; // depends on control dependency: [if], data = [none]
} else if( contains(falseValues, stringValue, ignoreCase) ) {
result = Boolean.FALSE; // depends on control dependency: [if], data = [none]
} else {
throw new SuperCsvCellProcessorException(String.format("'%s' could not be parsed as a Boolean", value),
context, this);
}
return next.execute(result, context);
} } |
public class class_name {
@Override
public List<ProposalPersonContract> getKeyPersons(ProposalDevelopmentDocumentContract pdDoc) {
List<ProposalPersonContract> keyPersons = new ArrayList<>();
if (pdDoc != null) {
for (ProposalPersonContract person : pdDoc.getDevelopmentProposal().getProposalPersons()) {
if (person.isKeyPerson()) {
keyPersons.add(person);
}
}
}
return keyPersons;
} } | public class class_name {
@Override
public List<ProposalPersonContract> getKeyPersons(ProposalDevelopmentDocumentContract pdDoc) {
List<ProposalPersonContract> keyPersons = new ArrayList<>();
if (pdDoc != null) {
for (ProposalPersonContract person : pdDoc.getDevelopmentProposal().getProposalPersons()) {
if (person.isKeyPerson()) {
keyPersons.add(person); // depends on control dependency: [if], data = [none]
}
}
}
return keyPersons;
} } |
public class class_name {
public void addListeners()
{
super.addListeners();
// By default, all fields are disabled (enable the target fields).
for (int iFieldSeq = 0; iFieldSeq < this.getSFieldCount(); iFieldSeq++)
{
ScreenField sField = this.getSField(iFieldSeq);
if (!(sField instanceof ToolScreen))
sField.setEnabled(false);
}
// 2. The first thing to do is to see if this is a message reply or just a screen display
BaseMessage message = this.getMessage();
if (message != null)
{
this.moveMessageParamsToScreen(message);
if (this.isReplyMessage(message))
{
// If this is a reply message, you may want to display this screen differently.
}
}
} } | public class class_name {
public void addListeners()
{
super.addListeners();
// By default, all fields are disabled (enable the target fields).
for (int iFieldSeq = 0; iFieldSeq < this.getSFieldCount(); iFieldSeq++)
{
ScreenField sField = this.getSField(iFieldSeq);
if (!(sField instanceof ToolScreen))
sField.setEnabled(false);
}
// 2. The first thing to do is to see if this is a message reply or just a screen display
BaseMessage message = this.getMessage();
if (message != null)
{
this.moveMessageParamsToScreen(message); // depends on control dependency: [if], data = [(message]
if (this.isReplyMessage(message))
{
// If this is a reply message, you may want to display this screen differently.
}
}
} } |
public class class_name {
@SuppressWarnings("squid:S1067")
protected boolean isSymbolCharacter(Char ch) {
if (ch.isEndOfInput() || ch.isDigit() || ch.isLetter() || ch.isWhitepace()) {
return false;
}
char c = ch.getValue();
if (Character.isISOControl(c)) {
return false;
}
return !(isAtBracket(true)
|| isAtStartOfBlockComment(false)
|| isAtStartOfLineComment(false)
|| isAtStartOfNumber()
|| isAtStartOfIdentifier()
|| stringDelimiters.containsKey(ch.getValue()));
} } | public class class_name {
@SuppressWarnings("squid:S1067")
protected boolean isSymbolCharacter(Char ch) {
if (ch.isEndOfInput() || ch.isDigit() || ch.isLetter() || ch.isWhitepace()) {
return false; // depends on control dependency: [if], data = [none]
}
char c = ch.getValue();
if (Character.isISOControl(c)) {
return false; // depends on control dependency: [if], data = [none]
}
return !(isAtBracket(true)
|| isAtStartOfBlockComment(false)
|| isAtStartOfLineComment(false)
|| isAtStartOfNumber()
|| isAtStartOfIdentifier()
|| stringDelimiters.containsKey(ch.getValue()));
} } |
public class class_name {
private void initIndexTemplate(String type) {
String template = "template_" + type;
try {
if (doesResourceNotExist("/_template/" + template)) {
logger.info("Creating the index template '" + template + "'");
InputStream stream = ElasticSearchDAOV6.class.getResourceAsStream("/" + template + ".json");
byte[] templateSource = IOUtils.toByteArray(stream);
HttpEntity entity = new NByteArrayEntity(templateSource, ContentType.APPLICATION_JSON);
elasticSearchAdminClient.performRequest(HttpMethod.PUT, "/_template/" + template, Collections.emptyMap(), entity);
}
} catch (Exception e) {
logger.error("Failed to init " + template, e);
}
} } | public class class_name {
private void initIndexTemplate(String type) {
String template = "template_" + type;
try {
if (doesResourceNotExist("/_template/" + template)) {
logger.info("Creating the index template '" + template + "'"); // depends on control dependency: [if], data = [none]
InputStream stream = ElasticSearchDAOV6.class.getResourceAsStream("/" + template + ".json");
byte[] templateSource = IOUtils.toByteArray(stream);
HttpEntity entity = new NByteArrayEntity(templateSource, ContentType.APPLICATION_JSON);
elasticSearchAdminClient.performRequest(HttpMethod.PUT, "/_template/" + template, Collections.emptyMap(), entity); // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
logger.error("Failed to init " + template, e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static ICommentProcessor wrap(final ICommentProcessor processor, final IProcessorDialect dialect) {
Validate.notNull(dialect, "Dialect cannot be null");
if (processor == null) {
return null;
}
return new CommentProcessorWrapper(processor, dialect);
} } | public class class_name {
public static ICommentProcessor wrap(final ICommentProcessor processor, final IProcessorDialect dialect) {
Validate.notNull(dialect, "Dialect cannot be null");
if (processor == null) {
return null; // depends on control dependency: [if], data = [none]
}
return new CommentProcessorWrapper(processor, dialect);
} } |
public class class_name {
protected void ensureAvailableBytes(int amount) throws IOException {
if (!this.seekEnabled && amount > this.buffer.remaining()) {
this.bufferPosition += this.buffer.position();
this.buffer.compact();
int limit = this.buffer.position();
final int read = this.stream.read(this.buffer);
if (read < 0) {
if (limit == 0) {
throw new EOFException();
}
} else {
limit += read;
}
this.buffer.rewind();
this.buffer.limit(limit);
}
if (amount > this.buffer.remaining()) {
throw new EOFException();
}
} } | public class class_name {
protected void ensureAvailableBytes(int amount) throws IOException {
if (!this.seekEnabled && amount > this.buffer.remaining()) {
this.bufferPosition += this.buffer.position();
this.buffer.compact();
int limit = this.buffer.position();
final int read = this.stream.read(this.buffer);
if (read < 0) {
if (limit == 0) {
throw new EOFException();
}
} else {
limit += read; // depends on control dependency: [if], data = [none]
}
this.buffer.rewind();
this.buffer.limit(limit);
}
if (amount > this.buffer.remaining()) {
throw new EOFException();
}
} } |
public class class_name {
@Override
public EClass getIfcMechanicalFastenerType() {
if (ifcMechanicalFastenerTypeEClass == null) {
ifcMechanicalFastenerTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(377);
}
return ifcMechanicalFastenerTypeEClass;
} } | public class class_name {
@Override
public EClass getIfcMechanicalFastenerType() {
if (ifcMechanicalFastenerTypeEClass == null) {
ifcMechanicalFastenerTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
.getEClassifiers().get(377);
// depends on control dependency: [if], data = [none]
}
return ifcMechanicalFastenerTypeEClass;
} } |
public class class_name {
public SignerConfig getSignerConfig(String serviceName, String regionName) {
if (serviceName == null)
throw new IllegalArgumentException();
SignerConfig signerConfig = null;
if (regionName != null) {
// Service+Region signer config has the highest precedence
String key = serviceName + SERVICE_REGION_DELIMITOR + regionName;
signerConfig = serviceRegionSigners.get(key);
if (signerConfig != null) {
return signerConfig;
}
// Region signer config has the 2nd highest precedence
signerConfig = regionSigners.get(regionName);
if (signerConfig != null) {
return signerConfig;
}
}
// Service signer config has the 3rd highest precedence
signerConfig = serviceSigners.get(serviceName);
// Fall back to the default
return signerConfig == null ? defaultSignerConfig : signerConfig;
} } | public class class_name {
public SignerConfig getSignerConfig(String serviceName, String regionName) {
if (serviceName == null)
throw new IllegalArgumentException();
SignerConfig signerConfig = null;
if (regionName != null) {
// Service+Region signer config has the highest precedence
String key = serviceName + SERVICE_REGION_DELIMITOR + regionName;
signerConfig = serviceRegionSigners.get(key); // depends on control dependency: [if], data = [none]
if (signerConfig != null) {
return signerConfig; // depends on control dependency: [if], data = [none]
}
// Region signer config has the 2nd highest precedence
signerConfig = regionSigners.get(regionName); // depends on control dependency: [if], data = [(regionName]
if (signerConfig != null) {
return signerConfig; // depends on control dependency: [if], data = [none]
}
}
// Service signer config has the 3rd highest precedence
signerConfig = serviceSigners.get(serviceName);
// Fall back to the default
return signerConfig == null ? defaultSignerConfig : signerConfig;
} } |
public class class_name {
private void validateSchema(Schema schema) throws DDFException {
Set<String> columnSet = new HashSet<String>();
if(schema != null && schema.getColumns() != null) {
for (Column column : schema.getColumns()) {
if (columnSet.contains(column.getName())) {
throw new DDFException(String.format("Duplicated column name %s", column.getName()));
} else {
columnSet.add(column.getName());
}
}
}
} } | public class class_name {
private void validateSchema(Schema schema) throws DDFException {
Set<String> columnSet = new HashSet<String>();
if(schema != null && schema.getColumns() != null) {
for (Column column : schema.getColumns()) {
if (columnSet.contains(column.getName())) {
throw new DDFException(String.format("Duplicated column name %s", column.getName()));
} else {
columnSet.add(column.getName()); // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
public boolean transit(T transition){
S currentState = definition.getState(currentStateId.get());
if (currentState != null){
try{
Transition<S> transitionDef = definition.getTransition(currentState, transition);
return currentStateId.compareAndSet(transitionDef.fromStateId, transitionDef.toStateId);
}catch(IllegalArgumentException iae){
return false;
}catch(NullPointerException npe){
return false;
}
}
return false;
} } | public class class_name {
public boolean transit(T transition){
S currentState = definition.getState(currentStateId.get());
if (currentState != null){
try{
Transition<S> transitionDef = definition.getTransition(currentState, transition);
return currentStateId.compareAndSet(transitionDef.fromStateId, transitionDef.toStateId); // depends on control dependency: [try], data = [none]
}catch(IllegalArgumentException iae){
return false;
}catch(NullPointerException npe){ // depends on control dependency: [catch], data = [none]
return false;
} // depends on control dependency: [catch], data = [none]
}
return false;
} } |
public class class_name {
public final GitLabApi duplicate() {
Integer sudoUserId = this.getSudoAsId();
GitLabApi gitLabApi = new GitLabApi(apiVersion, gitLabServerUrl,
getTokenType(), getAuthToken(), getSecretToken(), clientConfigProperties);
if (sudoUserId != null) {
gitLabApi.apiClient.setSudoAsId(sudoUserId);
}
if (getIgnoreCertificateErrors()) {
gitLabApi.setIgnoreCertificateErrors(true);
}
gitLabApi.defaultPerPage = this.defaultPerPage;
return (gitLabApi);
} } | public class class_name {
public final GitLabApi duplicate() {
Integer sudoUserId = this.getSudoAsId();
GitLabApi gitLabApi = new GitLabApi(apiVersion, gitLabServerUrl,
getTokenType(), getAuthToken(), getSecretToken(), clientConfigProperties);
if (sudoUserId != null) {
gitLabApi.apiClient.setSudoAsId(sudoUserId); // depends on control dependency: [if], data = [(sudoUserId]
}
if (getIgnoreCertificateErrors()) {
gitLabApi.setIgnoreCertificateErrors(true); // depends on control dependency: [if], data = [none]
}
gitLabApi.defaultPerPage = this.defaultPerPage;
return (gitLabApi);
} } |
public class class_name {
public GetDomainSummaryStatisticsResponse getDomainSummaryStatistics(GetDomainSummaryStatisticsRequest request) {
checkNotNull(request, "The parameter request should NOT be null.");
checkStringNotEmpty(request.getStartTime(), "startTime should NOT be empty.");
InternalRequest internalRequest = createRequest(HttpMethodName.GET, request,
STATISTICS, LIVE_DOMAIN, "summary");
internalRequest.addParameter("startTime", request.getStartTime());
if (request.getEndTime() != null) {
internalRequest.addParameter("endTime", request.getEndTime());
}
return invokeHttpClient(internalRequest, GetDomainSummaryStatisticsResponse.class);
} } | public class class_name {
public GetDomainSummaryStatisticsResponse getDomainSummaryStatistics(GetDomainSummaryStatisticsRequest request) {
checkNotNull(request, "The parameter request should NOT be null.");
checkStringNotEmpty(request.getStartTime(), "startTime should NOT be empty.");
InternalRequest internalRequest = createRequest(HttpMethodName.GET, request,
STATISTICS, LIVE_DOMAIN, "summary");
internalRequest.addParameter("startTime", request.getStartTime());
if (request.getEndTime() != null) {
internalRequest.addParameter("endTime", request.getEndTime()); // depends on control dependency: [if], data = [none]
}
return invokeHttpClient(internalRequest, GetDomainSummaryStatisticsResponse.class);
} } |
public class class_name {
@SuppressWarnings({"JavaDoc"})
private void addParentDirs( ArchiveEntry archiveEntry, File baseDir, String entry, ZipArchiveOutputStream zOut,
String prefix )
throws IOException
{
if ( !doFilesonly && getIncludeEmptyDirs() )
{
Stack<String> directories = new Stack<String>();
// Don't include the last entry itself if it's
// a dir; it will be added on its own.
int slashPos = entry.length() - ( entry.endsWith( "/" ) ? 1 : 0 );
while ( ( slashPos = entry.lastIndexOf( '/', slashPos - 1 ) ) != -1 )
{
String dir = entry.substring( 0, slashPos + 1 );
if ( addedDirs.contains( prefix + dir ) )
{
break;
}
directories.push( dir );
}
while ( !directories.isEmpty() )
{
String dir = directories.pop();
File f;
if ( baseDir != null )
{
f = new File( baseDir, dir );
}
else
{
f = new File( dir );
}
// the
// At this point we could do something like read the atr
final PlexusIoResource res = new AnonymousResource( f);
zipDir( res, zOut, prefix + dir, archiveEntry.getDefaultDirMode() );
}
}
} } | public class class_name {
@SuppressWarnings({"JavaDoc"})
private void addParentDirs( ArchiveEntry archiveEntry, File baseDir, String entry, ZipArchiveOutputStream zOut,
String prefix )
throws IOException
{
if ( !doFilesonly && getIncludeEmptyDirs() )
{
Stack<String> directories = new Stack<String>();
// Don't include the last entry itself if it's
// a dir; it will be added on its own.
int slashPos = entry.length() - ( entry.endsWith( "/" ) ? 1 : 0 );
while ( ( slashPos = entry.lastIndexOf( '/', slashPos - 1 ) ) != -1 )
{
String dir = entry.substring( 0, slashPos + 1 );
if ( addedDirs.contains( prefix + dir ) )
{
break;
}
directories.push( dir ); // depends on control dependency: [while], data = [none]
}
while ( !directories.isEmpty() )
{
String dir = directories.pop();
File f;
if ( baseDir != null )
{
f = new File( baseDir, dir ); // depends on control dependency: [if], data = [( baseDir]
}
else
{
f = new File( dir ); // depends on control dependency: [if], data = [none]
}
// the
// At this point we could do something like read the atr
final PlexusIoResource res = new AnonymousResource( f);
zipDir( res, zOut, prefix + dir, archiveEntry.getDefaultDirMode() ); // depends on control dependency: [while], data = [none]
}
}
} } |
public class class_name {
DependencyInfo getDependencyInfo() {
if (dependencyInfo == null) {
dependencyInfo = generateDependencyInfo();
}
if (!extraRequires.isEmpty() || !extraProvides.isEmpty()) {
dependencyInfo =
SimpleDependencyInfo.builder(getName(), getName())
.setProvides(concat(dependencyInfo.getProvides(), extraProvides))
.setRequires(concat(dependencyInfo.getRequires(), extraRequires))
.setTypeRequires(dependencyInfo.getTypeRequires())
.setLoadFlags(dependencyInfo.getLoadFlags())
.setHasExternsAnnotation(dependencyInfo.getHasExternsAnnotation())
.setHasNoCompileAnnotation(dependencyInfo.getHasNoCompileAnnotation())
.build();
extraRequires.clear();
extraProvides.clear();
}
return dependencyInfo;
} } | public class class_name {
DependencyInfo getDependencyInfo() {
if (dependencyInfo == null) {
dependencyInfo = generateDependencyInfo(); // depends on control dependency: [if], data = [none]
}
if (!extraRequires.isEmpty() || !extraProvides.isEmpty()) {
dependencyInfo =
SimpleDependencyInfo.builder(getName(), getName())
.setProvides(concat(dependencyInfo.getProvides(), extraProvides))
.setRequires(concat(dependencyInfo.getRequires(), extraRequires))
.setTypeRequires(dependencyInfo.getTypeRequires())
.setLoadFlags(dependencyInfo.getLoadFlags())
.setHasExternsAnnotation(dependencyInfo.getHasExternsAnnotation())
.setHasNoCompileAnnotation(dependencyInfo.getHasNoCompileAnnotation())
.build(); // depends on control dependency: [if], data = [none]
extraRequires.clear(); // depends on control dependency: [if], data = [none]
extraProvides.clear(); // depends on control dependency: [if], data = [none]
}
return dependencyInfo;
} } |
public class class_name {
private void sawInitOpcode(int seen) {
boolean isSyncCollection = false;
try {
stack.mergeJumps(this);
isSyncCollection = isSyncCollectionCreation(seen);
if (seen == Const.PUTFIELD) {
processCollectionStore();
}
} finally {
stack.sawOpcode(this, seen);
if (isSyncCollection && (stack.getStackDepth() > 0)) {
OpcodeStack.Item item = stack.getStackItem(0);
item.setUserValue(Boolean.TRUE);
}
}
} } | public class class_name {
private void sawInitOpcode(int seen) {
boolean isSyncCollection = false;
try {
stack.mergeJumps(this); // depends on control dependency: [try], data = [none]
isSyncCollection = isSyncCollectionCreation(seen); // depends on control dependency: [try], data = [none]
if (seen == Const.PUTFIELD) {
processCollectionStore(); // depends on control dependency: [if], data = [none]
}
} finally {
stack.sawOpcode(this, seen);
if (isSyncCollection && (stack.getStackDepth() > 0)) {
OpcodeStack.Item item = stack.getStackItem(0);
item.setUserValue(Boolean.TRUE); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public static Method findMethod(Object obj, String methodName, Object... params) {
Class<?>[] classArray = new Class<?>[params.length];
for (int i = 0; i < params.length; i++) {
classArray[i] = params[i].getClass();
}
try {
//look on all the public, protected, default and private methods of the class hierarchy.
Class<?> klass = obj.getClass();
while (klass != null) {
for (Method method : klass.getDeclaredMethods()) {
//check if the method name matches along with the number of parameters
if (method.getName().equals(methodName) && method.getParameterCount() == classArray.length) {
Class<?>[] paramClasses = method.getParameterTypes();
//Then check one by one all the provided parameters and see if they match the defined ones.
boolean parametersMatch = true;
for(int i = 0; i < params.length; i++) {
if(!paramClasses[i].isAssignableFrom(classArray[i])) {
parametersMatch = false;
break;
}
}
if(parametersMatch) { //exact match, return the method
return method;
}
}
}
klass = klass.getSuperclass();
}
//nothing found
throw new NoSuchMethodException();
}
catch (NoSuchMethodException ex) {
throw new RuntimeException(ex);
}
} } | public class class_name {
public static Method findMethod(Object obj, String methodName, Object... params) {
Class<?>[] classArray = new Class<?>[params.length];
for (int i = 0; i < params.length; i++) {
classArray[i] = params[i].getClass();
}
try {
//look on all the public, protected, default and private methods of the class hierarchy.
Class<?> klass = obj.getClass();
while (klass != null) {
for (Method method : klass.getDeclaredMethods()) {
//check if the method name matches along with the number of parameters
if (method.getName().equals(methodName) && method.getParameterCount() == classArray.length) {
Class<?>[] paramClasses = method.getParameterTypes();
//Then check one by one all the provided parameters and see if they match the defined ones.
boolean parametersMatch = true;
for(int i = 0; i < params.length; i++) {
if(!paramClasses[i].isAssignableFrom(classArray[i])) {
parametersMatch = false; // depends on control dependency: [if], data = [none]
break;
}
}
if(parametersMatch) { //exact match, return the method
return method; // depends on control dependency: [if], data = [none]
}
}
}
klass = klass.getSuperclass(); // depends on control dependency: [while], data = [none]
}
//nothing found
throw new NoSuchMethodException();
}
catch (NoSuchMethodException ex) {
throw new RuntimeException(ex);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public synchronized Set<RegistrationPoint> getRegistrationPoints() {
Set<RegistrationPoint> result = new HashSet<>();
for (List<RegistrationPoint> registrationPoints : registrationPoints.values()) {
result.addAll(registrationPoints);
}
return Collections.unmodifiableSet(result);
} } | public class class_name {
public synchronized Set<RegistrationPoint> getRegistrationPoints() {
Set<RegistrationPoint> result = new HashSet<>();
for (List<RegistrationPoint> registrationPoints : registrationPoints.values()) {
result.addAll(registrationPoints); // depends on control dependency: [for], data = [registrationPoints]
}
return Collections.unmodifiableSet(result);
} } |
public class class_name {
public static boolean containsOnlyDigits(final CharSequence string) {
int size = string.length();
for (int i = 0; i < size; i++) {
char c = string.charAt(i);
if (!CharUtil.isDigit(c)) {
return false;
}
}
return true;
} } | public class class_name {
public static boolean containsOnlyDigits(final CharSequence string) {
int size = string.length();
for (int i = 0; i < size; i++) {
char c = string.charAt(i);
if (!CharUtil.isDigit(c)) {
return false; // depends on control dependency: [if], data = [none]
}
}
return true;
} } |
public class class_name {
/**
 * Binds every element of {@code params} onto {@code stmt} using 1-based JDBC
 * parameter indexes: element i is bound at index i + 1. Null elements are
 * bound via setObject(idx, null); other values are delegated to the
 * ValueType resolved for their runtime class. If no ValueType is resolved the
 * parameter is left unbound and a warning is logged — NOTE(review): most
 * drivers then fail at execute time; confirm this is the intended contract.
 *
 * @param stmt   statement to bind into
 * @param params values to bind; individual elements may be null
 * @throws SQLException propagated from the JDBC driver
 */
@SuppressWarnings("unchecked")
protected void setParameters(PreparedStatement stmt, Object[] params) throws SQLException {
for (int i = 0; i < params.length; i++) {
final Object param = params[i];
final int jdbcIndex = i + 1;
if (param == null) {
stmt.setObject(jdbcIndex, null);
continue;
}
final Class<?> paramClass = param.getClass();
@SuppressWarnings("rawtypes")
ValueType valueType = MirageUtil.getValueType(paramClass, null, dialect, valueTypes);
if (valueType != null) {
valueType.set(paramClass, stmt, param, jdbcIndex);
} else if (logger.isWarnEnabled()) {
logger.warn("valueType for " + paramClass.getName() + " not found.");
}
}
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of setParameters; the
// "// depends on ..." lines are generated data labels — keep verbatim.
@SuppressWarnings("unchecked")
protected void setParameters(PreparedStatement stmt, Object[] params) throws SQLException {
for (int i = 0; i < params.length; i++) {
if(params[i] == null){
stmt.setObject(i + 1, null);
} else {
Class<?> propertType = params[i].getClass();
@SuppressWarnings("rawtypes")
ValueType valueType = MirageUtil.getValueType(propertType, null, dialect, valueTypes);
if(valueType != null){
valueType.set(propertType, stmt, params[i], i + 1);
// depends on control dependency: [if], data = [none]
} else {
if(logger.isWarnEnabled()) {
logger.warn("valueType for " + propertType.getName() + " not found.");
// depends on control dependency: [if], data = [none]
}
}
}
}
} } |
public class class_name {
/**
 * Validates that {@code dir} is an existing, readable and writable directory.
 * The first failed precondition is reported through {@code logger.error}
 * (messages are in Spanish, kept verbatim) and {@code false} is returned.
 *
 * Fix: removed the dead null-check on the absolute path —
 * File.getAbsolutePath() is specified never to return null, so that branch
 * could never execute.
 *
 * @param dir    candidate directory; may be null
 * @param logger error sink; assumed non-null — TODO confirm callers never pass null
 * @return true iff dir is non-null, exists, is a directory, and is readable and writable
 */
public static boolean verifyDir(File dir,Logger logger){
if (dir==null){
logger.error("El directorio es nulo.");
return false;
}
String fileName=dir.getAbsolutePath();
if (!dir.exists()){
logger.error("El path '"+fileName+"' no existe.");
return false;
}
if (!dir.isDirectory()){
logger.error("El path '"+fileName+"' no es un directorio.");
return false;
}
if (!dir.canRead()){
logger.error("No tenemos permisos de lectura en el path '"+fileName+"'.");
return false;
}
if (!dir.canWrite()){
logger.error("No tenemos permisos de escritura en el path '"+fileName+"'.");
return false;
}
return true;
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of verifyDir; the
// "// depends on ..." lines are generated data labels — keep verbatim.
public static boolean verifyDir(File dir,Logger logger){
if (dir==null){
logger.error("El directorio es nulo.");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
String fileName=dir.getAbsolutePath();
if (fileName==null){
return false;
// depends on control dependency: [if], data = [none]
}
if (!dir.exists()){
logger.error("El path '"+fileName+"' no existe.");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
if (!dir.isDirectory()){
logger.error("El path '"+fileName+"' no es un directorio.");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
if (!dir.canRead()){
logger.error("No tenemos permisos de lectura en el path '"+fileName+"'.");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
if (!dir.canWrite()){
logger.error("No tenemos permisos de escritura en el path '"+fileName+"'.");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
return true;
} } |
public class class_name {
/**
 * Returns the (possibly cached) revision id. When the REVISION_ID_RELOAD
 * flag reports stale, the flag is updated via setReload and the cached value
 * on {@code sa} is refreshed from {@code read().getRevisionId()} before
 * returning. NOTE(review): the exact semantics of isReload/setReload
 * (toggle vs clear) are not visible here — confirm against their definitions.
 */
@Override
public String getRevisionId() {
if (isReload(REVISION_ID_RELOAD)) {
setReload(REVISION_ID_RELOAD);
sa.setRevisionId(read().getRevisionId());
}
return sa.getRevisionId();
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of getRevisionId; the
// inline "// depends on ..." markers are generated data labels — keep verbatim.
@Override
public String getRevisionId() {
if (isReload(REVISION_ID_RELOAD)) {
setReload(REVISION_ID_RELOAD); // depends on control dependency: [if], data = [none]
sa.setRevisionId(read().getRevisionId()); // depends on control dependency: [if], data = [none]
}
return sa.getRevisionId();
} } |
public class class_name {
/**
 * Resolves the plural rules for the given locale and number type.
 * Lookup order: (1) language+region key in the preloaded rule map,
 * (2) bare language key, (3) fall back to the SPI provider.
 *
 * @param locale  locale whose language/region select the rules
 * @param numType number category selecting the rule map — semantics defined
 *                by NumberType (not visible here)
 * @return the resolved rules, possibly loaded on demand by the provider
 */
public static PluralRules of(Locale locale, NumberType numType) {
final Map<String, PluralRules> registered = getRuleMap(numType);
PluralRules found = null;
if (!registered.isEmpty()) {
final String region = FormatUtils.getRegion(locale);
if (!region.isEmpty()) {
found = registered.get(toKey(locale.getLanguage(), region));
}
if (found == null) {
found = registered.get(locale.getLanguage());
}
}
return (found != null) ? found : Holder.PROVIDER.load(locale, numType);
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of PluralRules.of; the
// inline "// depends on ..." markers are generated data labels — keep verbatim.
public static PluralRules of(
Locale locale,
NumberType numType
) {
Map<String, PluralRules> map = getRuleMap(numType);
PluralRules rules = null;
if (!map.isEmpty()) {
String region = FormatUtils.getRegion(locale);
if (!region.isEmpty()) {
rules = map.get(toKey(locale.getLanguage(), region)); // depends on control dependency: [if], data = [none]
}
if (rules == null) {
rules = map.get(locale.getLanguage()); // depends on control dependency: [if], data = [none]
}
}
if (rules == null) {
rules = Holder.PROVIDER.load(locale, numType); // depends on control dependency: [if], data = [none]
}
return rules;
} } |
public class class_name {
/**
 * Extracts the value component of a term encoded as
 * prefix + DELIMITER + value. Returns null when the delimiter is absent or
 * the value part is empty; otherwise returns the value with all NUL
 * (U+0000) characters removed.
 *
 * @param term encoded term string (must not be null)
 * @return cleaned value, or null when missing or empty
 */
public static String termValue(String term) {
final int sep = term.indexOf(MtasToken.DELIMITER);
if (sep < 0) {
return null;
}
final String raw = term.substring(sep + MtasToken.DELIMITER.length());
if (raw.isEmpty()) {
return null;
}
return raw.replace("\u0000", "");
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of termValue; the inline
// "// depends on ..." markers are generated data labels — keep verbatim.
public static String termValue(String term) {
int i = term.indexOf(MtasToken.DELIMITER);
String value = null;
if (i >= 0) {
value = term.substring((i + MtasToken.DELIMITER.length())); // depends on control dependency: [if], data = [(i]
value = (value.length() > 0) ? value : null; // depends on control dependency: [if], data = [none]
}
return (value == null) ? null : value.replace("\u0000", "");
} } |
public class class_name {
/**
 * Lazily resolves and caches the EClass at classifier index 10 of the
 * globally registered Log package. The unsynchronized lazy init may race on
 * first use, which is benign here because the registry lookup is idempotent.
 * NOTE(review): looks like an EMF-generated accessor — index 10 must match
 * the package model; confirm against LogPackage.
 */
public EClass getNewRevisionAdded() {
if (newRevisionAddedEClass == null) {
EPackage logPackage = EPackage.Registry.INSTANCE.getEPackage(LogPackage.eNS_URI);
newRevisionAddedEClass = (EClass) logPackage.getEClassifiers().get(10);
}
return newRevisionAddedEClass;
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of getNewRevisionAdded; the
// "// depends on ..." line is a generated data label — keep verbatim.
public EClass getNewRevisionAdded() {
if (newRevisionAddedEClass == null) {
newRevisionAddedEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(LogPackage.eNS_URI)
.getEClassifiers().get(10);
// depends on control dependency: [if], data = [none]
}
return newRevisionAddedEClass;
} } |
public class class_name {
/**
 * Transitions the publish engine into the STARTED state; no-op when already
 * started. After the transition, kicks off the publish job thread when no
 * thread is currently running and the queue has pending jobs.
 */
protected void startEngine() {
if (m_engineState == CmsPublishEngineState.ENGINE_STARTED) {
return; // already running — nothing to do
}
m_engineState = CmsPublishEngineState.ENGINE_STARTED;
// start publish job if jobs waiting
if ((m_currentPublishThread == null) && !m_publishQueue.isEmpty()) {
checkCurrentPublishJobThread();
}
} } | public class class_name {
// NOTE(review): dependency-annotated duplicate of startEngine; the inline
// "// depends on ..." markers are generated data labels — keep verbatim.
protected void startEngine() {
if (m_engineState != CmsPublishEngineState.ENGINE_STARTED) {
m_engineState = CmsPublishEngineState.ENGINE_STARTED; // depends on control dependency: [if], data = [none]
// start publish job if jobs waiting
if ((m_currentPublishThread == null) && !m_publishQueue.isEmpty()) {
checkCurrentPublishJobThread(); // depends on control dependency: [if], data = [none]
}
}
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.